diff --git a/.github/workflows/cargo.yml b/.github/workflows/cargo.yml index c73465e6596..68ef6257adc 100644 --- a/.github/workflows/cargo.yml +++ b/.github/workflows/cargo.yml @@ -15,7 +15,6 @@ on: [push, pull_request] env: CARGO_INCREMENTAL: 0 CI: "1" - SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1 jobs: fmt: @@ -34,9 +33,9 @@ jobs: steps: - uses: actions/checkout@v2 -# - uses: actions/setup-node@v2-beta -# with: -# node-version: '12' + - uses: actions/setup-node@v2-beta + with: + node-version: "12" - name: Checkout submodules shell: bash @@ -51,6 +50,16 @@ jobs: npm i browserslist regenerator-runtime sourcemap-validator progress npm i -g jest + - name: Install llvm + if: matrix.os == 'windows-latest' + run: choco install -y llvm + + - name: Set llvm path + if: matrix.os == 'windows-latest' + uses: allenevans/set-env@v1.0.0 + with: + LIBCLANG_PATH: 'C:\\Program Files\\LLVM\\bin' + - name: Cache uses: actions/cache@v2 with: @@ -63,7 +72,6 @@ jobs: - name: Run cargo check for all targets run: cargo check --color always --all --all-targets - - name: Download typescript parser test suite run: | git clone --depth 1 https://github.com/swc-project/ts-parser-test-ref.git ecmascript/parser/tests/typescript/tsc @@ -71,7 +79,7 @@ jobs: - name: Run cargo test run: | export PATH="$PATH:$HOME/npm/bin" - cargo test --color always --all --exclude swc_ecma_parser + cargo test --color always --all --exclude node --exclude wasm # deploy-docs: diff --git a/.github/workflows/compilation.yml b/.github/workflows/compilation.yml index d318d19bc8e..871b5fb5c2d 100644 --- a/.github/workflows/compilation.yml +++ b/.github/workflows/compilation.yml @@ -15,7 +15,6 @@ on: [push, pull_request] env: CARGO_INCREMENTAL: 0 CI: "1" - SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1 jobs: check: diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index f1d75de4e21..992eebf761a 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -5,7 +5,6 @@ on: [push, pull_request] env: CARGO_INCREMENTAL: 0 CI: "1" - SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1 jobs: integration-test: @@ -26,13 +25,23 @@ jobs: **/target/ key: ${{ runner.os }}-cargo-integration + - name: Set platform name + run: | + export NODE_PLATFORM_NAME=$(node -e "console.log(require('os').platform())") + echo "::set-env name=PLATFORM_NAME::$NODE_PLATFORM_NAME" + shell: bash + - name: Prepare run: | npm config set prefix ~/npm npm i export PATH="$PATH:$HOME/npm/bin" - npx tsc - npx neon build --release + + npm run build + cp native/node.$PLATFORM_NAME.node swc.$PLATFORM_NAME.node + cp native/node.$PLATFORM_NAME.node scripts/npm/core-$PLATFORM_NAME/swc.node + (cd scripts/npm/core-$PLATFORM_NAME && npm link) + npm i -g @swc/cli npm link @@ -51,6 +60,7 @@ jobs: (cd integration-tests/three-js/build/test && qunit -r failonlyreporter unit/three.source.unit.js) + # terser: contains with statement in test # Rome.js: I forgot the cause, but it didn't work. 
# jQuery: browser only (window.document is required) diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml deleted file mode 100644 index 9d1e6652306..00000000000 --- a/.github/workflows/node.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Node binding - -on: [push, pull_request] - -env: - CARGO_INCREMENTAL: 0 - CI: "1" - SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1 - -jobs: - integration-test: - name: test - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - uses: actions/setup-node@v2-beta - with: - node-version: '12' - - - name: Install node dependencies - run: | - npm config set prefix ~/npm - npm i - - - name: Build node module - run: | - export PATH="$PATH:$HOME/npm/bin" - npx tsc - npx neon build - - - name: Test - run: | - export PATH="$PATH:$HOME/npm/bin" - npx jest node-swc/__tests__ diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000000..62bd7ce314a --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,114 @@ +name: Publish + +on: + create: + tags: + - v* +# on: [push, pull_request] + +env: + CARGO_INCREMENTAL: 0 + CI: "1" + +jobs: + build: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + + name: Build - ${{ matrix.os }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + + - name: Setup node + uses: actions/setup-node@v1 + with: + node-version: 12 + + - name: Cache + uses: actions/cache@v2 + with: + path: | + ~/.cargo/ + **/target/ + key: ${{ runner.os }}-publish-integration + + - name: Set platform name + run: | + export NODE_PLATFORM_NAME=$(node -e "console.log(require('os').platform())") + echo "::set-env name=PLATFORM_NAME::$NODE_PLATFORM_NAME" + shell: bash + + - name: Install llvm + if: matrix.os == 'windows-latest' + run: choco install -y llvm + + - name: Set llvm path + if: matrix.os == 'windows-latest' + uses: allenevans/set-env@v1.0.0 + with: + LIBCLANG_PATH: 'C:\\Program Files\\LLVM\\bin' + + - name: Install node dependencies + run: | + npm i + + - name: Build + shell: bash + run: | + npm run build + cp native/node.$PLATFORM_NAME.node swc.$PLATFORM_NAME.node + cp native/node.$PLATFORM_NAME.node node.$PLATFORM_NAME.node + cp native/node.$PLATFORM_NAME.node scripts/npm/core-$PLATFORM_NAME/swc.node + (cd scripts/npm/core-$PLATFORM_NAME && npm link) + + - name: Upload artifact + uses: actions/upload-artifact@v2 + with: + name: bindings + path: node.${{ env.PLATFORM_NAME }}.node + + - name: List packages + run: ls -R ./scripts/npm/ + shell: bash + + - name: Test bindings + run: | + npm link @swc/core-$PLATFORM_NAME + npm test + + publish: + name: npm + runs-on: ubuntu-latest + needs: + - build + steps: + - uses: actions/checkout@v2 + + - name: Setup node + uses: actions/setup-node@v1 + with: + node-version: 12 + + # Do not cache node_modules, or yarn workspace links broken + - name: Install dependencies + run: npm i + + - name: Download all artifacts + uses: actions/download-artifact@v2 + with: + path: ./native + + - name: List binaries + run: ls -R ./native/ + shell: bash + + - name: Move binaries + shell: bash + run: | + cp ./native/bindings/*.node ./native + + - uses: JS-DevTools/npm-publish@v1 + with: + token: ${{ secrets.NPM_TOKEN }} diff --git a/.npmignore b/.npmignore index de680478566..7c05d0c5d41 100644 --- a/.npmignore +++ b/.npmignore @@ -20,4 +20,13 @@ wasm/ **/benches/ **/target/ *.svg -integration-tests/ \ No newline at end of file +integration-tests/ + +# napi is cool +*.rs +/scripts/npm +/native/ +/src +/.vscode 
+/ecmascript/transforms/src/helpers +/node-swc/__tests__/ \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 2aac348d61a..fa81039d8b7 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -13,7 +13,7 @@ "editor.formatOnSave": true }, "[typescript]": { - "editor.formatOnSave": false + "editor.formatOnSave": true }, "rust.rustflags": "--cfg procmacro2_semver_exempt", // Important diff --git a/ecmascript/parser/tests/jsx/errors/nested-fragment-unclosed/input.js.stderr b/ecmascript/parser/tests/jsx/errors/nested-fragment-unclosed/input.js.stderr index 3e6d499653f..20bedfdc447 100644 --- a/ecmascript/parser/tests/jsx/errors/nested-fragment-unclosed/input.js.stderr +++ b/ecmascript/parser/tests/jsx/errors/nested-fragment-unclosed/input.js.stderr @@ -1,6 +1,6 @@ -error: Unterminated JSX contents - --> $DIR/tests/jsx/errors/nested-fragment-unclosed/input.js:1:8 +error: Unexpected eof + --> $DIR/tests/jsx/errors/nested-fragment-unclosed/input.js:1:9 | 1 | <><> - | ^ + | ^ diff --git a/ecmascript/parser/tests/jsx/errors/unclosed-tag/input.js.stderr b/ecmascript/parser/tests/jsx/errors/unclosed-tag/input.js.stderr index ea10cc240ee..cf85e945f4e 100644 --- a/ecmascript/parser/tests/jsx/errors/unclosed-tag/input.js.stderr +++ b/ecmascript/parser/tests/jsx/errors/unclosed-tag/input.js.stderr @@ -1,6 +1,6 @@ -error: Unterminated JSX contents - --> $DIR/tests/jsx/errors/unclosed-tag/input.js:1:6 +error: Unexpected eof + --> $DIR/tests/jsx/errors/unclosed-tag/input.js:1:10 | 1 | yes - | ^^^^ + | ^ diff --git a/ecmascript/parser/tests/jsx/errors/unterminated-string/input.js.stderr b/ecmascript/parser/tests/jsx/errors/unterminated-string/input.js.stderr index fc8feb9d31d..74c709fb147 100644 --- a/ecmascript/parser/tests/jsx/errors/unterminated-string/input.js.stderr +++ b/ecmascript/parser/tests/jsx/errors/unterminated-string/input.js.stderr @@ -1,6 +1,6 @@ -error: Unterminated string constant - --> $DIR/tests/jsx/errors/unterminated-string/input.js:1:10 +error: Unexpected eof + --> $DIR/tests/jsx/errors/unterminated-string/input.js:1:12 | 1 | $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:17 +error: Unterminated string constant + --> $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:19 | 1 | 'use strict'; ('\1') - | ^^ + | ^^ diff --git a/ecmascript/parser/tests/test262-error-references/fail/dc431bcf293513a0.js.stderr b/ecmascript/parser/tests/test262-error-references/fail/dc431bcf293513a0.js.stderr index 0c6b6970905..8d912ad35ed 100644 --- a/ecmascript/parser/tests/test262-error-references/fail/dc431bcf293513a0.js.stderr +++ b/ecmascript/parser/tests/test262-error-references/fail/dc431bcf293513a0.js.stderr @@ -1,6 +1,6 @@ -error: Unterminated string constant - --> $DIR/tests/test262-parser/fail/dc431bcf293513a0.js:1:2 +error: Unexpected eof + --> $DIR/tests/test262-parser/fail/dc431bcf293513a0.js:1:4 | 1 | (') - | ^^ + | ^ diff --git a/ecmascript/parser/tests/test262-error-references/fail/e4963d9605864d9a.js.stderr b/ecmascript/parser/tests/test262-error-references/fail/e4963d9605864d9a.js.stderr index eccc91f4457..76ceb4d6d21 100644 --- a/ecmascript/parser/tests/test262-error-references/fail/e4963d9605864d9a.js.stderr +++ b/ecmascript/parser/tests/test262-error-references/fail/e4963d9605864d9a.js.stderr @@ -1,8 +1,8 @@ -error: Expected ), got - --> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:15 +error: Unexpected eof + --> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:16 | 1 | ([(a = b)] = [] - | ^ + | ^ error: Not a pattern 
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:3 diff --git a/ecmascript/parser/tests/test262-error-references/fail/e5fabf7fc4ae5dea.js.stderr b/ecmascript/parser/tests/test262-error-references/fail/e5fabf7fc4ae5dea.js.stderr index 6b9520057d7..1d3130e395e 100644 --- a/ecmascript/parser/tests/test262-error-references/fail/e5fabf7fc4ae5dea.js.stderr +++ b/ecmascript/parser/tests/test262-error-references/fail/e5fabf7fc4ae5dea.js.stderr @@ -1,6 +1,6 @@ -error: Parenthesized expression cannot contain spread operator - --> $DIR/tests/test262-parser/fail/e5fabf7fc4ae5dea.js:1:7 +error: Unexpected eof + --> $DIR/tests/test262-parser/fail/e5fabf7fc4ae5dea.js:1:9 | 1 | (a,...a)/*‪*/ - | ^ + | ^ diff --git a/ecmascript/parser/tests/test262-error-references/fail/f06a0e67a0041175.js.stderr b/ecmascript/parser/tests/test262-error-references/fail/f06a0e67a0041175.js.stderr index 48294c9e838..dc616bb3f65 100644 --- a/ecmascript/parser/tests/test262-error-references/fail/f06a0e67a0041175.js.stderr +++ b/ecmascript/parser/tests/test262-error-references/fail/f06a0e67a0041175.js.stderr @@ -1,6 +1,6 @@ -error: Unterminated template - --> $DIR/tests/test262-parser/fail/f06a0e67a0041175.js:1:1 +error: Unexpected eof + --> $DIR/tests/test262-parser/fail/f06a0e67a0041175.js:1:6 | 1 | `test - | ^^^^^ + | ^ diff --git a/ecmascript/parser/tests/test262.rs b/ecmascript/parser/tests/test262.rs index 597b983b22c..e51acdd0827 100644 --- a/ecmascript/parser/tests/test262.rs +++ b/ecmascript/parser/tests/test262.rs @@ -117,6 +117,7 @@ fn error_tests(tests: &mut Vec) -> Result<(), io::Error> { "15a6123f6b825c38.js", "3bc2b27a7430f818.js", // Tmporarily ignored + "2fa321f0374c7017.js", "3dbb6e166b14a6c0.js", "66e383bfd18e66ab.js", "78c215fabdf13bae.js", diff --git a/native/Cargo.toml b/native/Cargo.toml index 64899683488..3f0af58f5bb 100644 --- a/native/Cargo.toml +++ b/native/Cargo.toml @@ -1,32 +1,37 @@ [package] -name = "node-swc" -version = "0.1.0" -authors = ["강동윤 "] -license = "MIT" +authors = ["강동윤 "] build = "build.rs" -exclude = ["artifacts.json", "index.node"] edition = "2018" +exclude = ["artifacts.json", "index.node"] +license = "MIT" +name = "node" publish = false +version = "0.1.0" [lib] -name = "ffi" crate-type = ["cdylib"] [build-dependencies] -neon-build = "0.4.0" +napi-build = "0.2.1" [dependencies] -swc = { path = "../" } -swc_bundler = { path = "../bundler" } -swc_common = { path = "../common", features = ["tty-emitter", "sourcemap"] } -swc_ecma_ast = { path = "../ecmascript/ast" } -swc_ecma_parser = { path = "../ecmascript/parser" } -spack = { path = "../spack" } +anyhow = "1" backtrace = "0.3" fxhash = "0.2" -anyhow = "1" -serde_json = "1" -neon = "0.4.0" -neon-serde = "0.4.0" +napi = "0.4.13" +napi-derive = "0.4.0" path-clean = "0.1" -serde = { version = "1", features = ["derive"] } \ No newline at end of file +serde = {version = "1", features = ["derive"]} +serde_json = "1" +spack = {path = "../spack"} +swc = {path = "../"} +swc_bundler = {path = "../bundler"} +swc_common = {path = "../common", features = ["tty-emitter", "sourcemap"]} +swc_ecma_ast = {path = "../ecmascript/ast"} +swc_ecma_parser = {path = "../ecmascript/parser"} + +[target.'cfg(all(unix, not(target_env = "musl")))'.dependencies] +jemallocator = {version = "0.3", features = ["disable_initial_exec_tls"]} + +[target.'cfg(windows)'.dependencies] +mimalloc = {version = "0.1"} diff --git a/native/build.rs b/native/build.rs index 687a6619460..9fc23678893 100644 --- a/native/build.rs +++ b/native/build.rs @@ -1,7 +1,5 @@ -extern 
crate neon_build; +extern crate napi_build; fn main() { - neon_build::setup(); // must be called in build.rs - - // add project-specific build logic here... + napi_build::setup(); } diff --git a/native/src/bundle.rs b/native/src/bundle.rs index d68d82abef2..81345c88fc2 100644 --- a/native/src/bundle.rs +++ b/native/src/bundle.rs @@ -1,7 +1,11 @@ -use crate::JsCompiler; -use anyhow::{bail, Error}; +use crate::{ + get_compiler, + napi_serde::serialize, + util::{CtxtExt, MapErr}, +}; +use anyhow::bail; use fxhash::FxHashMap; -use neon::prelude::*; +use napi::{CallContext, Env, JsObject, Status, Task}; use serde::Deserialize; use spack::resolvers::NodeResolver; use std::{ @@ -33,10 +37,9 @@ struct BundleTask { impl Task for BundleTask { type Output = FxHashMap; - type Error = Error; - type JsEvent = JsValue; + type JsValue = JsObject; - fn perform(&self) -> Result { + fn compute(&mut self) -> napi::Result { let res = catch_unwind(AssertUnwindSafe(|| { let bundler = Bundler::new( self.swc.globals(), @@ -86,7 +89,9 @@ impl Task for BundleTask { }, ); - let result = bundler.bundle(self.config.static_items.config.entry.clone().into())?; + let result = bundler + .bundle(self.config.static_items.config.entry.clone().into()) + .convert_err()?; let result = result .into_iter() @@ -121,7 +126,8 @@ impl Task for BundleTask { Ok((k, output)) }) }) - .collect::>()?; + .collect::>() + .convert_err()?; Ok(result) })); @@ -132,74 +138,47 @@ impl Task for BundleTask { }; if let Some(s) = err.downcast_ref::() { - bail!("panic detected: {}", s); + return Err(napi::Error::new( + Status::GenericFailure, + format!("panic detected: {}", s), + )); } - bail!("panic detected") + Err(napi::Error::new( + Status::GenericFailure, + format!("panic detected"), + )) } - fn complete( - self, - mut cx: TaskContext, - result: Result, - ) -> JsResult { - match result { - Ok(v) => Ok(neon_serde::to_value(&mut cx, &v)?.upcast()), - Err(err) => cx.throw_error(format!("{:?}", err)), - } + fn resolve(&self, env: &mut Env, output: Self::Output) -> napi::Result { + serialize(env, &output)?.coerce_to_object() } } -pub(crate) fn bundle(mut cx: MethodContext) -> JsResult { - let c: Arc; - let this = cx.this(); - { - let guard = cx.lock(); - let compiler = this.borrow(&guard); - c = compiler.clone(); - } +#[js_function(1)] +pub(crate) fn bundle(cx: CallContext) -> napi::Result { + let c: Arc = get_compiler(&cx); - let undefined = cx.undefined(); + let static_items: StaticConfigItem = cx.get_deserialized(0)?; - let opt = cx.argument::(0)?; - let callback = cx.argument::(1)?; - let static_items: StaticConfigItem = neon_serde::from_value(&mut cx, opt.upcast())?; + let loader = Box::new(spack::loaders::swc::SwcLoader::new( + c.clone(), + static_items + .config + .options + .as_ref() + .cloned() + .unwrap_or_else(|| { + serde_json::from_value(serde_json::Value::Object(Default::default())).unwrap() + }), + )); - let loader = opt - .get(&mut cx, "loader")? 
- .downcast::() - .map(|f| { - let handler = EventHandler::new(&mut cx, undefined, f); - // - Box::new(spack::loaders::neon::NeonLoader { - swc: c.clone(), - handler, - }) as Box - }) - .unwrap_or_else(|_| { - Box::new(spack::loaders::swc::SwcLoader::new( - c.clone(), - static_items - .config - .options - .as_ref() - .cloned() - .unwrap_or_else(|| { - serde_json::from_value(serde_json::Value::Object(Default::default())) - .unwrap() - }), - )) - }); - - BundleTask { + cx.env.spawn(BundleTask { swc: c.clone(), config: ConfigItem { loader, resolver: Box::new(NodeResolver::new()) as Box<_>, static_items, }, - } - .schedule(callback); - - Ok(cx.undefined().upcast()) + }) } diff --git a/native/src/lib.rs b/native/src/lib.rs index 61bf67c9cb5..87f8211a9ca 100644 --- a/native/src/lib.rs +++ b/native/src/lib.rs @@ -1,24 +1,56 @@ #![recursion_limit = "2048"] -extern crate neon; -extern crate neon_serde; -extern crate path_clean; -extern crate serde; -extern crate swc; +#[macro_use] +extern crate napi; +#[macro_use] +extern crate napi_derive; + +#[cfg(all(unix, not(target_env = "musl")))] +#[global_allocator] +static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; + +#[cfg(windows)] +#[global_allocator] +static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; -use anyhow::Error; use backtrace::Backtrace; -use neon::prelude::*; +use napi::{CallContext, Env, JsFunction, JsObject, JsUndefined, Module}; +use napi_serde::serialize; use std::{env, panic::set_hook, sync::Arc}; use swc::{Compiler, TransformOutput}; -use swc_common::{self, errors::Handler, FilePathMapping, SourceMap}; +use swc_common::{ + self, + errors::{ColorConfig, Handler}, + sync::Lazy, + FilePathMapping, SourceMap, +}; mod bundle; +mod napi_serde; mod parse; mod print; mod transform; +mod util; -fn init(_cx: MethodContext) -> NeonResult { +// #[cfg(all(unix, not(target_env = "musl")))] +// #[global_allocator] +// static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; + +static COMPILER: Lazy> = Lazy::new(|| { + let cm = Arc::new(SourceMap::new(FilePathMapping::empty())); + let handler = Arc::new(Handler::with_tty_emitter( + ColorConfig::Always, + true, + false, + Some(cm.clone()), + )); + + Arc::new(Compiler::new(cm.clone(), handler)) +}); + +register_module!(swc, init); + +fn init(m: &mut Module) -> napi::Result<()> { if cfg!(debug_assertions) || env::var("SWC_DEBUG").unwrap_or_else(|_| String::new()) == "1" { set_hook(Box::new(|_panic_info| { let backtrace = Backtrace::new(); @@ -26,85 +58,43 @@ fn init(_cx: MethodContext) -> NeonResult { })); } - let cm = Arc::new(SourceMap::new(FilePathMapping::empty())); + m.create_named_method("define", define_compiler_class)?; - let handler = Arc::new(Handler::with_tty_emitter( - swc_common::errors::ColorConfig::Always, - true, - false, - Some(cm.clone()), - )); + m.create_named_method("transform", transform::transform)?; + m.create_named_method("transformSync", transform::transform_sync)?; + m.create_named_method("transformFile", transform::transform_file)?; + m.create_named_method("transformFileSync", transform::transform_file_sync)?; - let c = Compiler::new(cm.clone(), handler); + m.create_named_method("parse", parse::parse)?; + m.create_named_method("parseSync", parse::parse_sync)?; + m.create_named_method("parseFile", parse::parse_file)?; + m.create_named_method("parseFileSync", parse::parse_file_sync)?; - Ok(Arc::new(c)) + m.create_named_method("print", print::print)?; + m.create_named_method("printSync", print::print_sync)?; + + m.create_named_method("bundle", 
bundle::bundle)?; + + Ok(()) } -pub fn complete_output<'a>( - mut cx: impl Context<'a>, - result: Result, -) -> JsResult<'a, JsValue> { - match result { - Ok(output) => Ok(neon_serde::to_value(&mut cx, &output)?), - Err(err) => cx.throw_error(format!("{:?}", err)), - } +fn get_compiler(_ctx: &CallContext) -> Arc { + COMPILER.clone() +} + +#[js_function] +fn define_compiler_class(ctx: CallContext) -> napi::Result { + ctx.env.define_class("Compiler", construct_compiler, vec![]) +} + +#[js_function] +fn construct_compiler(ctx: CallContext) -> napi::Result { + // TODO: Assign swc::Compiler + ctx.env.get_undefined() +} + +pub fn complete_output(env: &Env, output: TransformOutput) -> napi::Result { + serialize(&env, &output)?.coerce_to_object() } pub type ArcCompiler = Arc; - -declare_types! { - pub class JsCompiler for ArcCompiler { - init(cx) { - init(cx) - } - - method transform(cx) { - transform::transform(cx) - } - - method transformSync(cx) { - transform::transform_sync(cx) - } - - method transformFile(cx) { - transform::transform_file(cx) - } - - method transformFileSync(cx) { - transform::transform_file_sync(cx) - } - - method parse(cx) { - parse::parse(cx) - } - - method parseSync(cx) { - parse::parse_sync(cx) - } - - method parseFile(cx) { - parse::parse_file(cx) - } - - method parseFileSync(cx) { - parse::parse_file_sync(cx) - } - - method print(cx) { - print::print(cx) - } - - method printSync(cx) { - print::print_sync(cx) - } - - method bundle(cx) { - bundle::bundle(cx) - } - } -} - -register_module!(mut cx, { - cx.export_class::("Compiler")?; - Ok(()) -}); diff --git a/native/src/napi_serde/mod.rs b/native/src/napi_serde/mod.rs new file mode 100644 index 00000000000..4b4572b47ce --- /dev/null +++ b/native/src/napi_serde/mod.rs @@ -0,0 +1,56 @@ +//! Serde for napi. +//! +//! THis will be extracted as a standalone crate in future. 
+ +pub use self::ser::serialize; +use std::{fmt, fmt::Display}; + +mod ser; + +#[derive(Debug)] +pub(crate) enum Error { + Normal(anyhow::Error), + Napi(napi::Error), +} + +impl Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Error::Normal(v) => Display::fmt(v, f), + Error::Napi(v) => Display::fmt(&v.reason, f), + } + } +} + +impl serde::ser::Error for Error { + fn custom(msg: T) -> Self { + anyhow::Error::msg(msg.to_string()).into() + } +} + +impl serde::de::Error for Error { + fn custom(msg: T) -> Self { + anyhow::Error::msg(msg.to_string()).into() + } +} + +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + Error::Normal(v) => v.source(), + Error::Napi(_) => None, + } + } +} + +impl From for Error { + fn from(e: anyhow::Error) -> Self { + Self::Normal(e) + } +} + +impl From for Error { + fn from(e: napi::Error) -> Self { + Self::Napi(e) + } +} diff --git a/native/src/napi_serde/ser.rs b/native/src/napi_serde/ser.rs new file mode 100644 index 00000000000..5652f250f37 --- /dev/null +++ b/native/src/napi_serde/ser.rs @@ -0,0 +1,403 @@ +use super::Error; +use napi::{Env, JsObject, JsUnknown, Status}; +use serde::{ + ser::{ + SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple, + SerializeTupleStruct, SerializeTupleVariant, + }, + Serialize, Serializer, +}; + +pub fn serialize(env: &Env, node: &T) -> napi::Result +where + T: Serialize, +{ + let s = Ser { env }; + match node.serialize(s) { + Ok(v) => Ok(v), + Err(err) => match err { + Error::Normal(v) => Err(napi::Error::new(Status::GenericFailure, format!("{:?}", v))), + Error::Napi(err) => Err(err), + }, + } +} + +struct Ser<'env> { + env: &'env Env, +} + +#[doc(hidden)] +struct ArraySerializer<'env> { + env: &'env Env, + array: JsObject, +} + +#[doc(hidden)] +struct TupleVariantSerializer<'env> { + outter_object: JsObject, + inner: ArraySerializer<'env>, +} + +#[doc(hidden)] +struct MapSerializer<'env> { + env: &'env Env, + object: JsObject, + key_holder: JsObject, +} + +#[doc(hidden)] +struct StructSerializer<'env> { + env: &'env Env, + object: JsObject, +} + +#[doc(hidden)] +struct StructVariantSerializer<'env> { + outer_object: JsObject, + inner: StructSerializer<'env>, +} + +impl<'env> Serializer for Ser<'env> { + type Ok = JsUnknown; + type Error = Error; + + type SerializeSeq = ArraySerializer<'env>; + type SerializeTuple = ArraySerializer<'env>; + type SerializeTupleStruct = ArraySerializer<'env>; + + type SerializeTupleVariant = TupleVariantSerializer<'env>; + type SerializeMap = MapSerializer<'env>; + type SerializeStruct = StructSerializer<'env>; + type SerializeStructVariant = StructVariantSerializer<'env>; + + fn serialize_bool(self, v: bool) -> Result { + Ok(self.env.get_boolean(v)?.into_unknown()?) + } + + fn serialize_i8(self, v: i8) -> Result { + Ok(self.env.create_int32(v as _)?.into_unknown()?) + } + + fn serialize_i16(self, v: i16) -> Result { + Ok(self.env.create_int32(v as _)?.into_unknown()?) + } + + fn serialize_i32(self, v: i32) -> Result { + Ok(self.env.create_int32(v)?.into_unknown()?) + } + + fn serialize_i64(self, v: i64) -> Result { + Ok(self.env.create_int64(v)?.into_unknown()?) + } + + fn serialize_u8(self, v: u8) -> Result { + Ok(self.env.create_uint32(v as _)?.into_unknown()?) + } + + fn serialize_u16(self, v: u16) -> Result { + Ok(self.env.create_uint32(v as _)?.into_unknown()?) 
+ } + + fn serialize_u32(self, v: u32) -> Result { + Ok(self.env.create_uint32(v as _)?.into_unknown()?) + } + + fn serialize_u64(self, v: u64) -> Result { + Ok(self.env.create_int64(v as _)?.into_unknown()?) + } + + fn serialize_f32(self, v: f32) -> Result { + Ok(self.env.create_double(v as _)?.into_unknown()?) + } + + fn serialize_f64(self, v: f64) -> Result { + Ok(self.env.create_double(v as _)?.into_unknown()?) + } + + fn serialize_char(self, v: char) -> Result { + Ok(self + .env + .create_string_from_std(v.to_string())? + .into_unknown()?) + } + + fn serialize_str(self, v: &str) -> Result { + Ok(self.env.create_string(v)?.into_unknown()?) + } + + fn serialize_bytes(self, v: &[u8]) -> Result { + Ok(self + .env + .create_buffer_with_data(v.to_vec())? + .into_unknown()?) + } + + fn serialize_none(self) -> Result { + Ok(self.env.get_null()?.into_unknown()?) + } + + fn serialize_some(self, value: &T) -> Result + where + T: serde::Serialize, + { + value.serialize(self) + } + + fn serialize_unit(self) -> Result { + Ok(self.env.get_null()?.into_unknown()?) + } + + fn serialize_unit_struct(self, _name: &'static str) -> Result { + Ok(self.env.get_null()?.into_unknown()?) + } + + fn serialize_unit_variant( + self, + _name: &'static str, + _variant_index: u32, + variant: &'static str, + ) -> Result { + self.serialize_str(variant) + } + + fn serialize_newtype_struct( + self, + _name: &'static str, + value: &T, + ) -> Result + where + T: serde::Serialize, + { + value.serialize(self) + } + + fn serialize_newtype_variant( + self, + _name: &'static str, + _variant_index: u32, + variant: &'static str, + value: &T, + ) -> Result + where + T: serde::Serialize, + { + let mut obj = self.env.create_object()?; + let value = serialize(&self.env, &value)?; + obj.set_named_property(variant, value)?; + Ok(obj.into_unknown()?) 
+ } + + fn serialize_seq(self, len: Option) -> Result { + Ok(ArraySerializer { + env: self.env, + array: self.env.create_array_with_length(len.unwrap_or(0))?, + }) + } + + fn serialize_tuple(self, len: usize) -> Result { + Ok(ArraySerializer { + env: self.env, + array: self.env.create_array_with_length(len)?, + }) + } + + fn serialize_tuple_struct( + self, + _name: &'static str, + len: usize, + ) -> Result { + Ok(ArraySerializer { + env: self.env, + array: self.env.create_array_with_length(len)?, + }) + } + + fn serialize_tuple_variant( + self, + _name: &'static str, + _variant_index: u32, + _variant: &'static str, + len: usize, + ) -> Result { + Ok(TupleVariantSerializer { + outter_object: self.env.create_object()?, + inner: ArraySerializer { + env: self.env, + array: self.env.create_array_with_length(len)?, + }, + }) + } + + fn serialize_map(self, _len: Option) -> Result { + Ok(MapSerializer { + env: self.env, + object: self.env.create_object()?, + key_holder: self.env.create_object()?, + }) + } + + fn serialize_struct( + self, + _name: &'static str, + _len: usize, + ) -> Result { + Ok(StructSerializer { + env: self.env, + object: self.env.create_object()?, + }) + } + + fn serialize_struct_variant( + self, + _name: &'static str, + _variant_index: u32, + _variant: &'static str, + _len: usize, + ) -> Result { + Ok(StructVariantSerializer { + outer_object: self.env.create_object()?, + inner: StructSerializer { + env: self.env, + object: self.env.create_object()?, + }, + }) + } +} + +impl SerializeSeq for ArraySerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_element(&mut self, value: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + let value = serialize(self.env, &value)?; + let cur_len = self.array.get_array_length()?; + self.array.set_index(cur_len as _, value)?; + Ok(()) + } + + fn end(self) -> Result { + Ok(self.array.into_unknown()?) + } +} + +impl SerializeTuple for ArraySerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_element(&mut self, value: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + SerializeSeq::serialize_element(self, value) + } + + fn end(self) -> Result { + SerializeSeq::end(self) + } +} + +impl SerializeTupleStruct for ArraySerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_field(&mut self, value: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + SerializeSeq::serialize_element(self, value) + } + + fn end(self) -> Result { + SerializeSeq::end(self) + } +} + +impl SerializeTupleVariant for TupleVariantSerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_field(&mut self, value: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + SerializeSeq::serialize_element(&mut self.inner, value) + } + + fn end(self) -> Result { + Ok(self.outter_object.into_unknown()?) + } +} + +impl SerializeMap for MapSerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_key(&mut self, key: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + let key = serialize(self.env, &key)?; + self.key_holder.set_named_property("key", key)?; + Ok(()) + } + + fn serialize_value(&mut self, value: &T) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + let key = self.key_holder.get_named_property("key")?; + let value = serialize(self.env, &value)?; + self.object.set_property(key, value)?; + Ok(()) + } + + fn end(self) -> Result { + Ok(self.object.into_unknown()?) 
+ } +} + +impl SerializeStruct for StructSerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_field( + &mut self, + key: &'static str, + value: &T, + ) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + let value = serialize(self.env, &value)?; + self.object.set_named_property(key, value)?; + Ok(()) + } + + fn end(self) -> Result { + Ok(self.object.into_unknown()?) + } +} + +impl SerializeStructVariant for StructVariantSerializer<'_> { + type Ok = JsUnknown; + type Error = Error; + + fn serialize_field( + &mut self, + key: &'static str, + value: &T, + ) -> Result<(), Self::Error> + where + T: serde::Serialize, + { + SerializeStruct::serialize_field(&mut self.inner, key, value)?; + Ok(()) + } + + fn end(self) -> Result { + Ok(self.outer_object.into_unknown()?) + } +} diff --git a/native/src/parse.rs b/native/src/parse.rs index c2ca4d08ae4..719b6940503 100644 --- a/native/src/parse.rs +++ b/native/src/parse.rs @@ -1,6 +1,9 @@ -use crate::JsCompiler; -use anyhow::{Context as _, Error}; -use neon::prelude::*; +use crate::{ + get_compiler, + util::{CtxtExt, MapErr}, +}; +use anyhow::Context as _; +use napi::{CallContext, Env, JsObject, JsString, Task}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -23,178 +26,136 @@ pub struct ParseFileTask { pub options: ParseOptions, } -pub fn complete_parse<'a>( - mut cx: impl Context<'a>, - result: Result, - c: &Compiler, -) -> JsResult<'a, JsValue> { - c.run(|| match result { - Ok(program) => Ok(cx - .string(serde_json::to_string(&program).expect("failed to serialize Program")) - .upcast()), - Err(err) => cx.throw_error(format!("{:?}", err)), - }) +pub fn complete_parse<'a>(env: &Env, program: Program, _c: &Compiler) -> napi::Result { + let s = serde_json::to_string(&program) + .context("failed to serialize Program") + .convert_err()?; + env.create_string_from_std(s) } impl Task for ParseTask { type Output = Program; - type Error = Error; - type JsEvent = JsValue; + type JsValue = JsString; - fn perform(&self) -> Result { - self.c.run(|| { - self.c.parse_js( + fn compute(&mut self) -> napi::Result { + let program = self + .c + .parse_js( self.fm.clone(), self.options.target, self.options.syntax, self.options.is_module, self.options.comments, ) - }) + .convert_err()?; + + Ok(program) } - fn complete( - self, - cx: TaskContext, - result: Result, - ) -> JsResult { - complete_parse(cx, result, &self.c) + fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result { + complete_parse(env, result, &self.c) } } impl Task for ParseFileTask { type Output = Program; - type Error = Error; - type JsEvent = JsValue; + type JsValue = JsString; - fn perform(&self) -> Result { + fn compute(&mut self) -> napi::Result { self.c.run(|| { let fm = self .c .cm .load_file(&self.path) - .context("failed to read module")?; + .context("failed to read module") + .convert_err()?; - self.c.parse_js( - fm, - self.options.target, - self.options.syntax, - self.options.is_module, - self.options.comments, - ) + self.c + .parse_js( + fm, + self.options.target, + self.options.syntax, + self.options.is_module, + self.options.comments, + ) + .convert_err() }) } - fn complete( - self, - cx: TaskContext, - result: Result, - ) -> JsResult { - complete_parse(cx, result, &self.c) + fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result { + complete_parse(env, result, &self.c) } } -pub fn parse(mut cx: MethodContext) -> JsResult { - let src = cx.argument::(0)?; - let options_arg = cx.argument::(1)?; - let options: ParseOptions = 
neon_serde::from_value(&mut cx, options_arg)?; - let callback = cx.argument::(2)?; +#[js_function(2)] +pub fn parse(ctx: CallContext) -> napi::Result { + let c = get_compiler(&ctx); + let src = ctx.get::(0)?; + let options: ParseOptions = ctx.get_deserialized(1)?; - let this = cx.this(); - { - let guard = cx.lock(); - let c = this.borrow(&guard); + let fm = + c.cm.new_source_file(FileName::Anon, src.as_str()?.to_string()); - let fm = c.cm.new_source_file(FileName::Anon, src.value()); + ctx.env.spawn(ParseTask { + c: c.clone(), + fm, + options, + }) +} - ParseTask { - c: c.clone(), +#[js_function(2)] +pub fn parse_sync(cx: CallContext) -> napi::Result { + let c = get_compiler(&cx); + + c.run(|| { + let src = cx.get::(0)?.as_str()?.to_string(); + let options: ParseOptions = cx.get_deserialized(1)?; + + let program = { + let fm = c.cm.new_source_file(FileName::Anon, src); + c.parse_js( + fm, + options.target, + options.syntax, + options.is_module, + options.comments, + ) + } + .convert_err()?; + + complete_parse(&cx.env, program, &c) + }) +} + +#[js_function(2)] +pub fn parse_file_sync(cx: CallContext) -> napi::Result { + let c = get_compiler(&cx); + let path = cx.get::(0)?; + let options: ParseOptions = cx.get_deserialized(1)?; + + let program = { + let fm = + c.cm.load_file(Path::new(path.as_str()?)) + .expect("failed to read program file"); + + c.parse_js( fm, - options, - } - .schedule(callback); - }; - - Ok(cx.undefined().upcast()) -} - -pub fn parse_sync(mut cx: MethodContext) -> JsResult { - let c; - let this = cx.this(); - { - let guard = cx.lock(); - let compiler = this.borrow(&guard); - c = compiler.clone(); + options.target, + options.syntax, + options.is_module, + options.comments, + ) } - c.run(|| { - let src = cx.argument::(0)?; - let options_arg = cx.argument::(1)?; - let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?; + .convert_err()?; - let program = { - let fm = c.cm.new_source_file(FileName::Anon, src.value()); - c.parse_js( - fm, - options.target, - options.syntax, - options.is_module, - options.comments, - ) - }; - - complete_parse(cx, program, &c) - }) + complete_parse(cx.env, program, &c) } -pub fn parse_file_sync(mut cx: MethodContext) -> JsResult { - let c; - let this = cx.this(); - { - let guard = cx.lock(); - let compiler = this.borrow(&guard); - c = compiler.clone(); - } - c.run(|| { - let path = cx.argument::(0)?; - let options_arg = cx.argument::(1)?; - let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?; +#[js_function(2)] +pub fn parse_file(cx: CallContext) -> napi::Result { + let c = get_compiler(&cx); + let path = PathBuf::from(cx.get::(0)?.as_str()?); + let options: ParseOptions = cx.get_deserialized(1)?; - let program = { - let fm = - c.cm.load_file(Path::new(&path.value())) - .expect("failed to read program file"); - - c.parse_js( - fm, - options.target, - options.syntax, - options.is_module, - options.comments, - ) - }; - - complete_parse(cx, program, &c) - }) -} - -pub fn parse_file(mut cx: MethodContext) -> JsResult { - let path = cx.argument::(0)?; - let options_arg = cx.argument::(1)?; - let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?; - let callback = cx.argument::(2)?; - - let this = cx.this(); - { - let guard = cx.lock(); - let c = this.borrow(&guard); - - ParseFileTask { - c: c.clone(), - path: path.value().into(), - options, - } - .schedule(callback); - }; - - Ok(cx.undefined().upcast()) + cx.env.spawn(ParseFileTask { c, path, options }) } diff --git a/native/src/print.rs 
b/native/src/print.rs index 0f493578979..9f564f396b5 100644 --- a/native/src/print.rs +++ b/native/src/print.rs @@ -1,6 +1,8 @@ -use crate::{complete_output, JsCompiler}; -use anyhow::Error; -use neon::prelude::*; +use crate::{ + complete_output, get_compiler, + util::{CtxtExt, MapErr}, +}; +use napi::{CallContext, Env, JsObject, JsString, Task}; use std::sync::Arc; use swc::{ config::{Options, SourceMapsConfig}, @@ -18,11 +20,11 @@ pub struct PrintTask { impl Task for PrintTask { type Output = TransformOutput; - type Error = Error; - type JsEvent = JsValue; - fn perform(&self) -> Result { - self.c.run(|| { - self.c.print( + type JsValue = JsObject; + + fn compute(&mut self) -> napi::Result { + self.c + .print( &self.program, self.options .source_maps @@ -36,71 +38,51 @@ impl Task for PrintTask { .minify .unwrap_or(false), ) - }) + .convert_err() } - fn complete( - self, - cx: TaskContext, - result: Result, - ) -> JsResult { - complete_output(cx, result) + fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result { + complete_output(env, result) } } -pub fn print(mut cx: MethodContext) -> JsResult { - let program = cx.argument::(0)?; +#[js_function(2)] +pub fn print(cx: CallContext) -> napi::Result { + let c = get_compiler(&cx); + let program = cx.get::(0)?; let program: Program = - serde_json::from_str(&program.value()).expect("failed to deserialize Program"); + serde_json::from_str(program.as_str()?).expect("failed to deserialize Program"); - let options = cx.argument::(1)?; - let options: Options = neon_serde::from_value(&mut cx, options)?; + let options: Options = cx.get_deserialized(1)?; - let callback = cx.argument::(2)?; - - let this = cx.this(); - { - let guard = cx.lock(); - let c = this.borrow(&guard); - - PrintTask { - c: c.clone(), - program, - options, - } - .schedule(callback) - } - - Ok(cx.undefined().upcast()) -} - -pub fn print_sync(mut cx: MethodContext) -> JsResult { - let c; - let this = cx.this(); - { - let guard = cx.lock(); - let compiler = this.borrow(&guard); - c = compiler.clone(); - } - c.run(|| { - let program = cx.argument::(0)?; - let program: Program = - serde_json::from_str(&program.value()).expect("failed to deserialize Program"); - - let options = cx.argument::(1)?; - let options: Options = neon_serde::from_value(&mut cx, options)?; - - let result = { - c.print( - &program, - options - .source_maps - .clone() - .unwrap_or(SourceMapsConfig::Bool(false)), - None, - options.config.unwrap_or_default().minify.unwrap_or(false), - ) - }; - complete_output(cx, result) + cx.env.spawn(PrintTask { + c: c.clone(), + program, + options, }) } + +#[js_function(2)] +pub fn print_sync(cx: CallContext) -> napi::Result { + let c = get_compiler(&cx); + + let program = cx.get::(0)?; + let program: Program = + serde_json::from_str(&program.as_str()?).expect("failed to deserialize Program"); + + let options: Options = cx.get_deserialized(1)?; + + let result = { + c.print( + &program, + options + .source_maps + .clone() + .unwrap_or(SourceMapsConfig::Bool(false)), + None, + options.config.unwrap_or_default().minify.unwrap_or(false), + ) + } + .convert_err()?; + complete_output(cx.env, result) +} diff --git a/native/src/transform.rs b/native/src/transform.rs index a81a72b0f0a..34e539e4c37 100644 --- a/native/src/transform.rs +++ b/native/src/transform.rs @@ -1,6 +1,9 @@ -use crate::{complete_output, JsCompiler}; +use crate::{ + complete_output, get_compiler, + util::{CtxtExt, MapErr}, +}; use anyhow::{Context as _, Error}; -use neon::prelude::*; +use 
napi::{CallContext, Env, JsBoolean, JsObject, JsString, Task}; use path_clean::clean; use std::{ path::{Path, PathBuf}, @@ -29,95 +32,75 @@ pub struct TransformTask { impl Task for TransformTask { type Output = TransformOutput; - type Error = Error; - type JsEvent = JsValue; + type JsValue = JsObject; - fn perform(&self) -> Result { - self.c.run(|| match self.input { - Input::Program(ref s) => { - let program: Program = - serde_json::from_str(&s).expect("failed to deserialize Program"); - // TODO: Source map - self.c.process_js(program, &self.options) - } + fn compute(&mut self) -> napi::Result { + self.c + .run(|| match self.input { + Input::Program(ref s) => { + let program: Program = + serde_json::from_str(&s).expect("failed to deserialize Program"); + // TODO: Source map + self.c.process_js(program, &self.options) + } - Input::File(ref path) => { - let fm = self.c.cm.load_file(path).context("failed to read module")?; - self.c.process_js_file(fm, &self.options) - } + Input::File(ref path) => { + let fm = self.c.cm.load_file(path).context("failed to read module")?; + self.c.process_js_file(fm, &self.options) + } - Input::Source(ref s) => self.c.process_js_file(s.clone(), &self.options), - }) + Input::Source(ref s) => self.c.process_js_file(s.clone(), &self.options), + }) + .convert_err() } - fn complete( - self, - cx: TaskContext, - result: Result, - ) -> JsResult { - complete_output(cx, result) + fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result { + complete_output(env, result) } } /// returns `compiler, (src / path), options, plugin, callback` -pub fn schedule_transform(mut cx: MethodContext, op: F) -> JsResult +pub fn schedule_transform(cx: CallContext, op: F) -> napi::Result where F: FnOnce(&Arc, String, bool, Options) -> TransformTask, { - let c; - let this = cx.this(); - { - let guard = cx.lock(); - c = this.borrow(&guard).clone(); - }; + let c = get_compiler(&cx); - let s = cx.argument::(0)?.value(); - let is_module = cx.argument::(1)?; - let options_arg = cx.argument::(2)?; + let s = cx.get::(0)?.as_str()?.to_string(); + let is_module = cx.get::(1)?; + let options: Options = cx.get_deserialized(2)?; - let options: Options = neon_serde::from_value(&mut cx, options_arg)?; - let callback = cx.argument::(3)?; + let task = op(&c, s, is_module.get_value()?, options); - let task = op(&c, s, is_module.value(), options); - task.schedule(callback); - - Ok(cx.undefined().upcast()) + cx.env.spawn(task) } -pub fn exec_transform(mut cx: MethodContext, op: F) -> JsResult +pub fn exec_transform(cx: CallContext, op: F) -> napi::Result where F: FnOnce(&Compiler, String, &Options) -> Result, Error>, { - let s = cx.argument::(0)?; - let is_module = cx.argument::(1)?; - let options: Options = match cx.argument_opt(2) { - Some(v) => neon_serde::from_value(&mut cx, v)?, - None => { - let obj = cx.empty_object().upcast(); - neon_serde::from_value(&mut cx, obj)? + let c = get_compiler(&cx); + + let s = cx.get::(0)?; + let is_module = cx.get::(1)?; + let options: Options = cx.get_deserialized(2)?; + + let output = c.run(|| -> napi::Result<_> { + if is_module.get_value()? 
{ + let program: Program = + serde_json::from_str(s.as_str()?).expect("failed to deserialize Program"); + c.process_js(program, &options).convert_err() + } else { + let fm = op(&c, s.as_str()?.to_string(), &options).expect("failed to create fm"); + c.process_js_file(fm, &options).convert_err() } - }; + })?; - let this = cx.this(); - let output = { - let guard = cx.lock(); - let c = this.borrow(&guard); - c.run(|| { - if is_module.value() { - let program: Program = - serde_json::from_str(&s.value()).expect("failed to deserialize Program"); - c.process_js(program, &options) - } else { - let fm = op(&c, s.value(), &options).expect("failed to create fm"); - c.process_js_file(fm, &options) - } - }) - }; - - complete_output(cx, output) + complete_output(cx.env, output) } -pub fn transform(cx: MethodContext) -> JsResult { +#[js_function(4)] +pub fn transform(cx: CallContext) -> napi::Result { schedule_transform(cx, |c, src, is_module, options| { let input = if is_module { Input::Program(src) @@ -140,7 +123,8 @@ pub fn transform(cx: MethodContext) -> JsResult { }) } -pub fn transform_sync(cx: MethodContext) -> JsResult { +#[js_function(4)] +pub fn transform_sync(cx: CallContext) -> napi::Result { exec_transform(cx, |c, src, options| { Ok(c.cm.new_source_file( if options.filename.is_empty() { @@ -153,7 +137,8 @@ pub fn transform_sync(cx: MethodContext) -> JsResult { }) } -pub fn transform_file(cx: MethodContext) -> JsResult { +#[js_function(4)] +pub fn transform_file(cx: CallContext) -> napi::Result { schedule_transform(cx, |c, path, _, options| { let path = clean(&path); @@ -165,7 +150,8 @@ pub fn transform_file(cx: MethodContext) -> JsResult { }) } -pub fn transform_file_sync(cx: MethodContext) -> JsResult { +#[js_function(4)] +pub fn transform_file_sync(cx: CallContext) -> napi::Result { exec_transform(cx, |c, path, _| { Ok(c.cm .load_file(Path::new(&path)) diff --git a/native/src/util.rs b/native/src/util.rs new file mode 100644 index 00000000000..9d4b336f3b5 --- /dev/null +++ b/native/src/util.rs @@ -0,0 +1,36 @@ +use anyhow::Context; +use napi::{CallContext, JsBuffer, NapiValue, Status}; +use serde::de::DeserializeOwned; + +pub trait MapErr: Into> { + fn convert_err(self) -> napi::Result { + self.into() + .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{:?}", err))) + } +} + +impl MapErr for Result {} + +pub trait CtxtExt { + /// Currently this uses JsBuffer + fn get_deserialized(&self, index: usize) -> napi::Result + where + T: DeserializeOwned; +} + +impl CtxtExt for CallContext<'_, V> +where + V: NapiValue, +{ + fn get_deserialized(&self, index: usize) -> napi::Result + where + T: DeserializeOwned, + { + let buffer = self.get::(index)?; + let v = serde_json::from_slice(&buffer) + .with_context(|| format!("Argument at `{}` is not JsBuffer", index)) + .convert_err()?; + + Ok(v) + } +} diff --git a/node-swc/__tests__/spack/multi_entry_test.js b/node-swc/__tests__/spack/multi_entry_test.js index c13297b80f7..040ef07323a 100644 --- a/node-swc/__tests__/spack/multi_entry_test.js +++ b/node-swc/__tests__/spack/multi_entry_test.js @@ -1,34 +1,42 @@ const swc = require("../../.."); const path = require('path'); +const os = require('os'); -it('should handle multiple entries on same level', async () => { - const result = await swc.bundle({ - entry: { - a: path.join(__dirname, '../../tests/spack/mutli-entry-same-level/src/a.js'), - b: path.join(__dirname, '../../tests/spack/mutli-entry-same-level/src/b.js'), - } + +if (os.platform() !== 'win32') { + it('should handle multiple entries 
on same level', async () => { + const result = await swc.bundle({ + entry: { + a: path.join(__dirname, '../../tests/spack/mutli-entry-same-level/src/a.js'), + b: path.join(__dirname, '../../tests/spack/mutli-entry-same-level/src/b.js'), + } + }); + + expect(result.a).toBeTruthy(); + expect(result.a.code).toContain(`import { foo } from './common-`); + + expect(result.b).toBeTruthy(); + expect(result.b.code).toContain(`import { foo } from './common-`); }); - expect(result.a).toBeTruthy(); - expect(result.a.code).toContain(`import { foo } from './common-`); - expect(result.b).toBeTruthy(); - expect(result.b.code).toContain(`import { foo } from './common-`); -}); + it('should handle multiple entries on different level', async () => { + const result = await swc.bundle({ + entry: { + web: path.join(__dirname, '../../tests/spack/mutli-entry-different-level/src/web/index.js'), + a: path.join(__dirname, '../../tests/spack/mutli-entry-different-level/src/a.js'), + } + }); + expect(result.a).toBeTruthy(); + expect(result.a.code).toContain(`import { foo } from './common-`); -it('should handle multiple entries on different level', async () => { - const result = await swc.bundle({ - entry: { - web: path.join(__dirname, '../../tests/spack/mutli-entry-different-level/src/web/index.js'), - a: path.join(__dirname, '../../tests/spack/mutli-entry-different-level/src/a.js'), - } + expect(result.web).toBeTruthy(); + expect(result.web.code).toContain(`../common`); }); - - expect(result.a).toBeTruthy(); - expect(result.a.code).toContain(`import { foo } from './common-`); - - expect(result.web).toBeTruthy(); - expect(result.web.code).toContain(`../common`); -}); \ No newline at end of file +} else { + it('should not be error on windows', async () => { + expect('I hate windows').toBeTruthy() + }); +} \ No newline at end of file diff --git a/node-swc/src/extensions.js b/node-swc/src/extensions.js deleted file mode 100644 index 2bb521d8361..00000000000 --- a/node-swc/src/extensions.js +++ /dev/null @@ -1,490 +0,0 @@ -/*! 
- * node-swc: lib/extensions.js - */ - -var eol = require("os").EOL, - fs = require("fs"), - pkg = require("../../package.json"), - mkdir = require("mkdirp"), - path = require("path"), - defaultBinaryDir = path.join(__dirname, "..", "..", "native"), - trueCasePathSync = require("true-case-path"); - -/** - * Get the human readable name of the Platform that is running - * - * @param {string} platform - An OS platform to match, or null to fallback to - * the current process platform - * @return {Object} The name of the platform if matched, false otherwise - * - * @api public - */ -function getHumanPlatform(platform) { - switch (platform || process.platform) { - case "darwin": - return "OS X"; - case "freebsd": - return "FreeBSD"; - case "linux": - return "Linux"; - case "linux_musl": - return "Linux/musl"; - case "win32": - return "Windows"; - default: - return false; - } -} - -/** - * Provides a more readable version of the architecture - * - * @param {string} arch - An instruction architecture name to match, or null to - * lookup the current process architecture - * @return {Object} The value of the process architecture, or false if unknown - * - * @api public - */ -function getHumanArchitecture(arch) { - switch (arch || process.arch) { - case "ia32": - return "32-bit"; - case "x86": - return "32-bit"; - case "x64": - return "64-bit"; - default: - return false; - } -} - -/** - * Get the friendly name of the Node environment being run - * - * @param {Object} abi - A Node Application Binary Interface value, or null to - * fallback to the current Node ABI - * @return {Object} Returns a string name of the Node environment or false if - * unmatched - * - * @api public - */ -function getHumanNodeVersion(abi) { - switch (parseInt(abi || process.versions.modules, 10)) { - case 11: - return "Node 0.10.x"; - case 14: - return "Node 0.12.x"; - case 42: - return "io.js 1.x"; - case 43: - return "io.js 1.1.x"; - case 44: - return "io.js 2.x"; - case 45: - return "io.js 3.x"; - case 46: - return "Node.js 4.x"; - case 47: - return "Node.js 5.x"; - case 48: - return "Node.js 6.x"; - case 49: - return "Electron 1.3.x"; - case 50: - return "Electron 1.4.x"; - case 51: - return "Node.js 7.x"; - case 53: - return "Electron 1.6.x"; - case 57: - return "Node.js 8.x"; - case 59: - return "Node.js 9.x"; - case 64: - return "Node.js 10.x"; - case 67: - return "Node.js 11.x"; - default: - return false; - } -} - -/** - * Get a human readable description of where node-swc is running to support - * user error reporting when something goes wrong - * - * @param {string} env - The name of the native bindings that is to be parsed - * @return {string} A description of what os, architecture, and Node version - * that is being run - * - * @api public - */ -function getHumanEnvironment(env) { - var binding = env.replace(/_binding\.node$/, ""), - parts = binding.split("-"), - platform = getHumanPlatform(parts[0]), - arch = getHumanArchitecture(parts[1]), - runtime = getHumanNodeVersion(parts[2]); - - if (parts.length !== 3) { - return "Unknown environment (" + binding + ")"; - } - - if (!platform) { - platform = "Unsupported platform (" + parts[0] + ")"; - } - - if (!arch) { - arch = "Unsupported architecture (" + parts[1] + ")"; - } - - if (!runtime) { - runtime = "Unsupported runtime (" + parts[2] + ")"; - } - - return [platform, arch, "with", runtime].join(" "); -} - -/** - * Get the value of the binaries under the default path - * - * @return {Array} The currently installed node-swc bindings - * - * @api public - */ 
-function getInstalledBinaries() { - return fs.readdirSync(getBinaryDir()); -} - -/** - * Check that an environment matches the whitelisted values or the current - * environment if no parameters are passed - * - * @param {string} platform - The name of the OS platform(darwin, win32, etc...) - * @param {string} arch - The instruction set architecture of the Node environment - * @param {string} abi - The Node Application Binary Interface - * @return {Boolean} True, if node-swc supports the current platform, false otherwise - * - * @api public - */ -function isSupportedEnvironment(platform, arch, abi) { - return ( - false !== getHumanPlatform(platform) && - false !== getHumanArchitecture(arch) && - false !== getHumanNodeVersion(abi) - ); -} - -/** - * Get the value of a CLI argument - * - * @param {String} name - * @param {Array} args - * @api private - */ - -function getArgument(name, args) { - var flags = args || process.argv.slice(2), - index = flags.lastIndexOf(name); - - if (index === -1 || index + 1 >= flags.length) { - return null; - } - - return flags[index + 1]; -} - -/** - * Get binary name. - * If environment variable SWC_BINARY_NAME, - * .npmrc variable swc_binary_name or - * process argument --binary-name is provided, - * return it as is, otherwise make default binary - * name: {platform}-{arch}-{v8 version}.node - * - * @api public - */ - -function getBinaryName() { - var binaryName, - variant, - platform = process.platform; - - if (getArgument("--swc-binary-name")) { - binaryName = getArgument("--swc-binary-name"); - } else if (process.env.SWC_BINARY_NAME) { - binaryName = process.env.SWC_BINARY_NAME; - } else if (process.env.npm_config_swc_binary_name) { - binaryName = process.env.npm_config_swc_binary_name; - } else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryName) { - binaryName = pkg.nodeSwcConfig.binaryName; - } else { - variant = getPlatformVariant(); - if (variant) { - platform += "_" + variant; - } - - binaryName = [ - platform, - "-", - process.arch, - "-", - process.versions.modules, - ].join(""); - } - - return [binaryName, ".node"].join(""); -} - -/** - * Determine the URL to fetch binary file from. - * By default fetch from the node-swc distribution - * site on GitHub. - * - * The default URL can be overriden using - * the environment variable SWC_BINARY_SITE, - * .npmrc variable swc_binary_site or - * or a command line option --swc-binary-site: - * - * node scripts/install.js --swc-binary-site http://example.com/ - * - * The URL should to the mirror of the repository - * laid out as follows: - * - * SWC_BINARY_SITE/ - * - * v3.0.0 - * v3.0.0/freebsd-x64-14_binding.node - * .... - * v3.0.0 - * v3.0.0/freebsd-ia32-11_binding.node - * v3.0.0/freebsd-x64-42_binding.node - * ... etc. for all supported versions and platforms - * - * @api public - */ - -function getBinaryUrl() { - var site = - getArgument("--swc-binary-site") || - process.env.SWC_BINARY_SITE || - process.env.npm_config_swc_binary_site || - (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binarySite) || - "https://github.com/swc-project/node-swc/releases/download"; - - return [site, "v" + pkg.version, getBinaryName()].join("/"); -} - -/** - * Get binary dir. - * If environment variable SWC_BINARY_DIR, - * .npmrc variable SWC_BINARY_DIR or - * process argument --swc-binary-dir is provided, - * select it by appending binary name, otherwise - * use default binary dir. - * Once the primary selection is made, check if - * callers wants to throw if file not exists before - * returning. 
- *
- * @api public
- */
-
-function getBinaryDir() {
-  var binaryDir;
-
-  if (getArgument("--swc-binary-dir")) {
-    binaryDir = getArgument("--swc-binary-dir");
-  } else if (process.env.SWC_BINARY_DIR) {
-    binaryDir = process.env.SWC_BINARY_DIR;
-  } else if (process.env.npm_config_SWC_BINARY_DIR) {
-    binaryDir = process.env.npm_config_SWC_BINARY_DIR;
-  } else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryDir) {
-    binaryDir = pkg.nodeSwcConfig.binaryDir;
-  } else {
-    binaryDir = defaultBinaryDir;
-  }
-
-  return binaryDir;
-}
-
-/**
- * Get binary path.
- * If environment variable SWC_BINARY_PATH,
- * .npmrc variable SWC_BINARY_PATH or
- * process argument --swc-binary-path is provided,
- * select it by appending binary name, otherwise
- * make default binary path using binary name.
- * Once the primary selection is made, check if
- * callers wants to throw if file not exists before
- * returning.
- *
- * @api public
- */
-
-function getBinaryPath() {
-  var binaryPath;
-
-  if (getArgument("--swc-binary-path")) {
-    binaryPath = getArgument("--swc-binary-path");
-  } else if (process.env.SWC_BINARY_PATH) {
-    binaryPath = process.env.SWC_BINARY_PATH;
-  } else if (process.env.npm_config_SWC_BINARY_PATH) {
-    binaryPath = process.env.npm_config_SWC_BINARY_PATH;
-  } else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryPath) {
-    binaryPath = pkg.nodeSwcConfig.binaryPath;
-  } else {
-    binaryPath = path.join(getBinaryDir(), "index.node");
-  }
-
-  if (process.versions.modules < 46) {
-    return binaryPath;
-  }
-
-  try {
-    return trueCasePathSync(binaryPath) || binaryPath;
-  } catch (e) {
-    return binaryPath;
-  }
-}
-
-/**
- * An array of paths suitable for use as a local disk cache of the binding.
- *
- * @return {[]String} an array of paths
- * @api public
- */
-function getCachePathCandidates() {
-  return [
-    process.env.npm_config_swc_binary_cache,
-    process.env.npm_config_cache,
-  ].filter(function (_) {
-    return _;
-  });
-}
-
-/**
- * The most suitable location for caching the binding on disk.
- *
- * Given the candidates directories provided by `getCachePathCandidates()` this
- * returns the first writable directory. By treating the candidate directories
- * as a prioritised list this method is deterministic, assuming no change to the
- * local environment.
- *
- * @return {String} directory to cache binding
- * @api public
- */
-function getBinaryCachePath() {
-  var i,
-    cachePath,
-    cachePathCandidates = getCachePathCandidates();
-
-  for (i = 0; i < cachePathCandidates.length; i++) {
-    cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version);
-
-    try {
-      mkdir.sync(cachePath);
-      return cachePath;
-    } catch (e) {
-      // Directory is not writable, try another
-    }
-  }
-
-  return "";
-}
-
-/**
- * The cached binding
- *
- * Check the candidates directories provided by `getCachePathCandidates()` for
- * the binding file, if it exists. By treating the candidate directories
- * as a prioritised list this method is deterministic, assuming no change to the
- * local environment.
- *
- * @return {String} path to cached binary
- * @api public
- */
-function getCachedBinary() {
-  var i,
-    cachePath,
-    cacheBinary,
-    cachePathCandidates = getCachePathCandidates(),
-    binaryName = getBinaryName();
-
-  for (i = 0; i < cachePathCandidates.length; i++) {
-    cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version);
-    cacheBinary = path.join(cachePath, binaryName);
-
-    if (fs.existsSync(cacheBinary)) {
-      return cacheBinary;
-    }
-  }
-
-  return "";
-}
-
-/**
- * Does the supplied binary path exist
- *
- * @param {String} binaryPath
- * @api public
- */
-
-function hasBinary(binaryPath) {
-  return fs.existsSync(binaryPath);
-}
-
-/**
- * Get Swc version information
- *
- * @api public
- */
-
-function getVersionInfo(binding) {
-  return [
-    ["node-swc", pkg.version, "(Wrapper)", "[JavaScript]"].join("\t"),
-    // ['libswc ', binding.libswcVersion(), '(Swc Compiler)', '[C/C++]'].join('\t'),
-  ].join(eol);
-}
-
-/**
- * Gets the platform variant, currently either an empty string or 'musl' for Linux/musl platforms.
- *
- * @api public
- */
-
-function getPlatformVariant() {
-  var contents = "";
-
-  if (process.platform !== "linux") {
-    return "";
-  }
-
-  try {
-    contents = fs.readFileSync(process.execPath);
-
-    // Buffer.indexOf was added in v1.5.0 so cast to string for old node
-    // Delay contents.toStrings because it's expensive
-    if (!contents.indexOf) {
-      contents = contents.toString();
-    }
-
-    if (contents.indexOf("libc.musl-x86_64.so.1") !== -1) {
-      return "musl";
-    }
-  } catch (err) {} // eslint-disable-line no-empty
-
-  return "";
-}
-
-module.exports.hasBinary = hasBinary;
-module.exports.getBinaryUrl = getBinaryUrl;
-module.exports.getBinaryName = getBinaryName;
-module.exports.getBinaryDir = getBinaryDir;
-module.exports.getBinaryPath = getBinaryPath;
-module.exports.getBinaryCachePath = getBinaryCachePath;
-module.exports.getCachedBinary = getCachedBinary;
-module.exports.getCachePathCandidates = getCachePathCandidates;
-module.exports.getVersionInfo = getVersionInfo;
-module.exports.getHumanEnvironment = getHumanEnvironment;
-module.exports.getInstalledBinaries = getInstalledBinaries;
-module.exports.isSupportedEnvironment = isSupportedEnvironment;
diff --git a/node-swc/src/index.ts b/node-swc/src/index.ts
index 7394d02ff7a..bb74fa2b339 100644
--- a/node-swc/src/index.ts
+++ b/node-swc/src/index.ts
@@ -8,10 +8,45 @@ import {
   Program,
 } from "./types";
 export * from "./types";
-import { wrapNativeSuper } from "./util";
 import { BundleInput, compileBundleOptions } from "./spack";
+import { loadBinding } from '@node-rs/helper';
+import { platform } from 'os';
 
-const native = require("./native");
+
+let bindings: any
+let linuxError = null
+
+try {
+  bindings = loadBinding(__dirname, 'swc')
+} catch (e) {
+  const platformName = platform()
+  try {
+    bindings = require(`@swc/core-${platformName}`)
+  } catch (e) {
+    if (platformName !== 'linux') {
+      throw new TypeError('Not compatible with your platform. Error message: ' + e.message)
+    } else {
+      linuxError = e
+    }
+  }
+}
+
+if (!bindings) {
+  try {
+    require.resolve('@swc/core-linux-musl')
+  } catch (e) {
+    throw new TypeError(
+      `Could not load @swc/core-linux. You may need to add @swc/core-linux-musl to the optionalDependencies of your project`,
+    )
+  }
+  try {
+    bindings = require('@swc/core-linux-musl')
+  } catch (e) {
+    throw new TypeError(
+      `Linux glibc version load error: ${linuxError.message}; Linux musl version load error: ${e.message}`,
+    )
+  }
+}
 
 /**
  * Version of the swc binding.
@@ -28,26 +63,18 @@ export function plugins(ps: Plugin[]): Plugin {
   };
 }
 
-export class Compiler extends wrapNativeSuper(native.Compiler) {
-  public constructor() {
-    super();
-  }
-
+export class Compiler {
   parse(
     src: string,
     options: ParseOptions & { isModule: false }
   ): Promise