Migrate to napi (#1009)

강동윤 2020-08-30 15:29:42 +09:00 committed by GitHub
parent b443a1e372
commit 8e237288d7
68 changed files with 1352 additions and 1536 deletions


@ -15,7 +15,6 @@ on: [push, pull_request]
env:
CARGO_INCREMENTAL: 0
CI: "1"
SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1
jobs:
fmt:
@ -34,9 +33,9 @@ jobs:
steps:
- uses: actions/checkout@v2
# - uses: actions/setup-node@v2-beta
# with:
# node-version: '12'
- uses: actions/setup-node@v2-beta
with:
node-version: "12"
- name: Checkout submodules
shell: bash
@ -51,6 +50,16 @@ jobs:
npm i browserslist regenerator-runtime sourcemap-validator progress
npm i -g jest
- name: Install llvm
if: matrix.os == 'windows-latest'
run: choco install -y llvm
- name: Set llvm path
if: matrix.os == 'windows-latest'
uses: allenevans/set-env@v1.0.0
with:
LIBCLANG_PATH: 'C:\\Program Files\\LLVM\\bin'
- name: Cache
uses: actions/cache@v2
with:
@ -63,7 +72,6 @@ jobs:
- name: Run cargo check for all targets
run: cargo check --color always --all --all-targets
- name: Download typescript parser test suite
run: |
git clone --depth 1 https://github.com/swc-project/ts-parser-test-ref.git ecmascript/parser/tests/typescript/tsc
@ -71,7 +79,7 @@ jobs:
- name: Run cargo test
run: |
export PATH="$PATH:$HOME/npm/bin"
cargo test --color always --all --exclude swc_ecma_parser
cargo test --color always --all --exclude node --exclude wasm
#
deploy-docs:


@ -15,7 +15,6 @@ on: [push, pull_request]
env:
CARGO_INCREMENTAL: 0
CI: "1"
SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1
jobs:
check:


@ -5,7 +5,6 @@ on: [push, pull_request]
env:
CARGO_INCREMENTAL: 0
CI: "1"
SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1
jobs:
integration-test:
@ -26,13 +25,23 @@ jobs:
**/target/
key: ${{ runner.os }}-cargo-integration
- name: Set platform name
run: |
export NODE_PLATFORM_NAME=$(node -e "console.log(require('os').platform())")
echo "::set-env name=PLATFORM_NAME::$NODE_PLATFORM_NAME"
shell: bash
- name: Prepare
run: |
npm config set prefix ~/npm
npm i
export PATH="$PATH:$HOME/npm/bin"
npx tsc
npx neon build --release
npm run build
cp native/node.$PLATFORM_NAME.node swc.$PLATFORM_NAME.node
cp native/node.$PLATFORM_NAME.node scripts/npm/core-$PLATFORM_NAME/swc.node
(cd scripts/npm/core-$PLATFORM_NAME && npm link)
npm i -g @swc/cli
npm link
@ -51,6 +60,7 @@ jobs:
(cd integration-tests/three-js/build/test && qunit -r failonlyreporter unit/three.source.unit.js)
# terser: contains with statement in test
# Rome.js: I forgot the cause, but it didn't work.
# jQuery: browser only (window.document is required)


@ -1,35 +0,0 @@
name: Node binding
on: [push, pull_request]
env:
CARGO_INCREMENTAL: 0
CI: "1"
SKIP_SWC_BINARY_DOWNLOAD_FOR_CI: 1
jobs:
integration-test:
name: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2-beta
with:
node-version: '12'
- name: Install node dependencies
run: |
npm config set prefix ~/npm
npm i
- name: Build node module
run: |
export PATH="$PATH:$HOME/npm/bin"
npx tsc
npx neon build
- name: Test
run: |
export PATH="$PATH:$HOME/npm/bin"
npx jest node-swc/__tests__

.github/workflows/publish.yml (new file, 114 lines)

@ -0,0 +1,114 @@
name: Publish
on:
create:
tags:
- v*
# on: [push, pull_request]
env:
CARGO_INCREMENTAL: 0
CI: "1"
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Build - ${{ matrix.os }}
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: 12
- name: Cache
uses: actions/cache@v2
with:
path: |
~/.cargo/
**/target/
key: ${{ runner.os }}-publish-integration
- name: Set platform name
run: |
export NODE_PLATFORM_NAME=$(node -e "console.log(require('os').platform())")
echo "::set-env name=PLATFORM_NAME::$NODE_PLATFORM_NAME"
shell: bash
- name: Install llvm
if: matrix.os == 'windows-latest'
run: choco install -y llvm
- name: Set llvm path
if: matrix.os == 'windows-latest'
uses: allenevans/set-env@v1.0.0
with:
LIBCLANG_PATH: 'C:\\Program Files\\LLVM\\bin'
- name: Install node dependencies
run: |
npm i
- name: Build
shell: bash
run: |
npm run build
cp native/node.$PLATFORM_NAME.node swc.$PLATFORM_NAME.node
cp native/node.$PLATFORM_NAME.node node.$PLATFORM_NAME.node
cp native/node.$PLATFORM_NAME.node scripts/npm/core-$PLATFORM_NAME/swc.node
(cd scripts/npm/core-$PLATFORM_NAME && npm link)
- name: Upload artifact
uses: actions/upload-artifact@v2
with:
name: bindings
path: node.${{ env.PLATFORM_NAME }}.node
- name: List packages
run: ls -R ./scripts/npm/
shell: bash
- name: Test bindings
run: |
npm link @swc/core-$PLATFORM_NAME
npm test
publish:
name: npm
runs-on: ubuntu-latest
needs:
- build
steps:
- uses: actions/checkout@v2
- name: Setup node
uses: actions/setup-node@v1
with:
node-version: 12
# Do not cache node_modules, or yarn workspace links will break
- name: Install dependencies
run: npm i
- name: Download all artifacts
uses: actions/download-artifact@v2
with:
path: ./native
- name: List binaries
run: ls -R ./native/
shell: bash
- name: Move binaries
shell: bash
run: |
cp ./native/bindings/*.node ./native
- uses: JS-DevTools/npm-publish@v1
with:
token: ${{ secrets.NPM_TOKEN }}


@ -21,3 +21,12 @@ wasm/
**/target/
*.svg
integration-tests/
# napi is cool
*.rs
/scripts/npm
/native/
/src
/.vscode
/ecmascript/transforms/src/helpers
/node-swc/__tests__/


@ -13,7 +13,7 @@
"editor.formatOnSave": true
},
"[typescript]": {
"editor.formatOnSave": false
"editor.formatOnSave": true
},
"rust.rustflags": "--cfg procmacro2_semver_exempt",
// Important


@ -1,5 +1,5 @@
error: Unterminated JSX contents
--> $DIR/tests/jsx/errors/nested-fragment-unclosed/input.js:1:8
error: Unexpected eof
--> $DIR/tests/jsx/errors/nested-fragment-unclosed/input.js:1:9
|
1 | <><></>
| ^


@ -1,6 +1,6 @@
error: Unterminated JSX contents
--> $DIR/tests/jsx/errors/unclosed-tag/input.js:1:6
error: Unexpected eof
--> $DIR/tests/jsx/errors/unclosed-tag/input.js:1:10
|
1 | <foo>yes
| ^^^^
| ^


@ -1,6 +1,6 @@
error: Unterminated string constant
--> $DIR/tests/jsx/errors/unterminated-string/input.js:1:10
error: Unexpected eof
--> $DIR/tests/jsx/errors/unterminated-string/input.js:1:12
|
1 | <foo bar="
| ^^
| ^


@ -1,6 +1,6 @@
error: Expected corresponding JSX closing tag for <>
--> $DIR/tests/jsx/errors/wrong-closing-tag-fragment/input.js:1:3
error: Unexpected eof
--> $DIR/tests/jsx/errors/wrong-closing-tag-fragment/input.js:1:15
|
1 | <></something>
| ^^^^^^^^^^^^
| ^


@ -1,6 +1,6 @@
error: Expected corresponding JSX closing tag for <Foo>
--> $DIR/tests/jsx/errors/wrong-closing-tag/input.js:1:6
error: Unexpected eof
--> $DIR/tests/jsx/errors/wrong-closing-tag/input.js:1:12
|
1 | <Foo></Bar>
| ^^^^^^
| ^


@ -1,6 +1,6 @@
error: Expected corresponding JSX closing tag for <something>
--> $DIR/tests/jsx/errors/wrong-opening-tag-fragment/input.js:1:12
error: Unexpected eof
--> $DIR/tests/jsx/errors/wrong-opening-tag-fragment/input.js:1:15
|
1 | <something></>
| ^^^
| ^


@ -1,6 +1,6 @@
error: Unterminated regexp literal
--> $DIR/tests/test262-parser/fail/095bea002b10b8e1.js:1:5
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/095bea002b10b8e1.js:1:8
|
1 | foo[/42
| ^^^
| ^


@ -1,5 +1,5 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/19699bcdea35eb46.js:1:17
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/19699bcdea35eb46.js:1:19
|
1 | 'use strict'; ('\4')
| ^^


@ -1,5 +1,5 @@
error: Expected ], got <eof>
--> $DIR/tests/test262-parser/fail/245843abef9e72e7.js:1:1
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/245843abef9e72e7.js:1:2
|
1 | [
| ^


@ -1,5 +1,5 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/3990bb94b19b1071.module.js:1:3
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/3990bb94b19b1071.module.js:1:5
|
1 | ('\1')
| ^^


@ -1,5 +1,5 @@
error: Expected }, got <eof>
--> $DIR/tests/test262-parser/fail/41895c8145489971.js:1:18
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/41895c8145489971.js:1:19
|
1 | `hello ${10 `test`
| ^


@ -1,5 +1,5 @@
error: Parenthesized expression cannot contain spread operator
--> $DIR/tests/test262-parser/fail/4cce9feb5a563377.js:1:7
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/4cce9feb5a563377.js:1:9
|
1 | (a,...a)
| ^


@ -1,5 +1,5 @@
error: Expected ], got <eof>
--> $DIR/tests/test262-parser/fail/4ee7b10cd97f554c.js:1:2
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/4ee7b10cd97f554c.js:1:3
|
1 | [,
| ^


@ -1,6 +1,6 @@
error: Invalid unciode escape
--> $DIR/tests/test262-parser/fail/5427bdf48f3eb6d9.js:1:3
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/5427bdf48f3eb6d9.js:1:10
|
1 | ('\u{2028')
| ^^^^^^^
| ^^


@ -1,5 +1,5 @@
error: Expected 4 hex characters
--> $DIR/tests/test262-parser/fail/575367951ac8635d.js:1:3
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/575367951ac8635d.js:1:5
|
1 | ('\u')
| ^^


@ -1,6 +1,6 @@
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/647e21f8f157c338.js:1:2
--> $DIR/tests/test262-parser/fail/647e21f8f157c338.js:1:4
|
1 | ('')
| ^
| ^^


@ -1,6 +1,6 @@
error: Invalid access to super
--> $DIR/tests/test262-parser/fail/73d1b1b1bc1dabfb.js:1:1
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/73d1b1b1bc1dabfb.js:1:6
|
1 | super
| ^^^^^
| ^


@ -1,5 +1,5 @@
error: Parenthesized expression cannot contain spread operator
--> $DIR/tests/test262-parser/fail/82b8003b91d8b346.js:1:8
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/82b8003b91d8b346.js:1:10
|
1 | (b, ...a)
| ^


@ -1,6 +1,6 @@
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/89036b2edb64c00c.js:1:2
--> $DIR/tests/test262-parser/fail/89036b2edb64c00c.js:2:1
|
1 | ('
| ^
2 | ')
| ^^


@ -1,6 +1,6 @@
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/8af69d8f15295ed2.js:1:2
--> $DIR/tests/test262-parser/fail/8af69d8f15295ed2.js:1:4
|
1 | ('')
| ^
| ^^


@ -1,5 +1,5 @@
error: Expected }, got <eof>
--> $DIR/tests/test262-parser/fail/8c353ce78b905b58.js:1:4
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/8c353ce78b905b58.js:1:5
|
1 | `${a
| ^


@ -1,6 +1,6 @@
error: Expected unicode escape
--> $DIR/tests/test262-parser/fail/94535dc25ef762ee.js:1:16
error: Expected 4 hex characters
--> $DIR/tests/test262-parser/fail/94535dc25ef762ee.js:1:17
|
1 | var x = /[a-z]/\\ux
| ^
| ^^


@ -1,5 +1,5 @@
error: Parenthesized expression cannot contain spread operator
--> $DIR/tests/test262-parser/fail/95c10472e36270b6.js:1:7
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/95c10472e36270b6.js:1:9
|
1 | (a,...a)
| ^


@ -1,6 +1,6 @@
error: Unterminated regexp literal
--> $DIR/tests/test262-parser/fail/97fc32bf01227e39.js:1:2
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/97fc32bf01227e39.js:1:6
|
1 | [/[/]
| ^^^^
| ^


@ -1,5 +1,5 @@
error: Expected 2 hex characters
--> $DIR/tests/test262-parser/fail/b3fc8ced7ce28c35.js:1:3
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/b3fc8ced7ce28c35.js:1:5
|
1 | ('\x')
| ^^


@ -1,6 +1,6 @@
error: Expected 2 hex characters
--> $DIR/tests/test262-parser/fail/b61406dafcaab4b7.js:1:3
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/b61406dafcaab4b7.js:1:6
|
1 | ('\x0')
| ^^^
| ^^


@ -1,6 +1,6 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/ca2716d236c027cd.js:1:38
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/ca2716d236c027cd.js:1:51
|
1 | function hello() { 'use strict'; ({ "\1": 42 }); }
| ^^
| ^


@ -1,5 +1,5 @@
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:17
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/d04aecd166354406.js:1:19
|
1 | 'use strict'; ('\1')
| ^^


@ -1,6 +1,6 @@
error: Unterminated string constant
--> $DIR/tests/test262-parser/fail/dc431bcf293513a0.js:1:2
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/dc431bcf293513a0.js:1:4
|
1 | (')
| ^^
| ^


@ -1,5 +1,5 @@
error: Expected ), got <eof>
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:15
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/e4963d9605864d9a.js:1:16
|
1 | ([(a = b)] = []
| ^


@ -1,5 +1,5 @@
error: Parenthesized expression cannot contain spread operator
--> $DIR/tests/test262-parser/fail/e5fabf7fc4ae5dea.js:1:7
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/e5fabf7fc4ae5dea.js:1:9
|
1 | (a,...a)/**/
| ^


@ -1,6 +1,6 @@
error: Unterminated template
--> $DIR/tests/test262-parser/fail/f06a0e67a0041175.js:1:1
error: Unexpected eof
--> $DIR/tests/test262-parser/fail/f06a0e67a0041175.js:1:6
|
1 | `test
| ^^^^^
| ^


@ -117,6 +117,7 @@ fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
"15a6123f6b825c38.js",
"3bc2b27a7430f818.js",
// Temporarily ignored
"2fa321f0374c7017.js",
"3dbb6e166b14a6c0.js",
"66e383bfd18e66ab.js",
"78c215fabdf13bae.js",


@ -1,32 +1,37 @@
[package]
name = "node-swc"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
license = "MIT"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
build = "build.rs"
exclude = ["artifacts.json", "index.node"]
edition = "2018"
exclude = ["artifacts.json", "index.node"]
license = "MIT"
name = "node"
publish = false
version = "0.1.0"
[lib]
name = "ffi"
crate-type = ["cdylib"]
[build-dependencies]
neon-build = "0.4.0"
napi-build = "0.2.1"
[dependencies]
anyhow = "1"
backtrace = "0.3"
fxhash = "0.2"
napi = "0.4.13"
napi-derive = "0.4.0"
path-clean = "0.1"
serde = {version = "1", features = ["derive"]}
serde_json = "1"
spack = {path = "../spack"}
swc = {path = "../"}
swc_bundler = {path = "../bundler"}
swc_common = {path = "../common", features = ["tty-emitter", "sourcemap"]}
swc_ecma_ast = {path = "../ecmascript/ast"}
swc_ecma_parser = {path = "../ecmascript/parser"}
spack = { path = "../spack" }
backtrace = "0.3"
fxhash = "0.2"
anyhow = "1"
serde_json = "1"
neon = "0.4.0"
neon-serde = "0.4.0"
path-clean = "0.1"
serde = { version = "1", features = ["derive"] }
[target.'cfg(all(unix, not(target_env = "musl")))'.dependencies]
jemallocator = {version = "0.3", features = ["disable_initial_exec_tls"]}
[target.'cfg(windows)'.dependencies]
mimalloc = {version = "0.1"}


@ -1,7 +1,5 @@
extern crate neon_build;
extern crate napi_build;
fn main() {
neon_build::setup(); // must be called in build.rs
// add project-specific build logic here...
napi_build::setup();
}


@ -1,7 +1,11 @@
use crate::JsCompiler;
use anyhow::{bail, Error};
use crate::{
get_compiler,
napi_serde::serialize,
util::{CtxtExt, MapErr},
};
use anyhow::bail;
use fxhash::FxHashMap;
use neon::prelude::*;
use napi::{CallContext, Env, JsObject, Status, Task};
use serde::Deserialize;
use spack::resolvers::NodeResolver;
use std::{
@ -33,10 +37,9 @@ struct BundleTask {
impl Task for BundleTask {
type Output = FxHashMap<String, TransformOutput>;
type Error = Error;
type JsEvent = JsValue;
type JsValue = JsObject;
fn perform(&self) -> Result<Self::Output, Self::Error> {
fn compute(&mut self) -> napi::Result<Self::Output> {
let res = catch_unwind(AssertUnwindSafe(|| {
let bundler = Bundler::new(
self.swc.globals(),
@ -86,7 +89,9 @@ impl Task for BundleTask {
},
);
let result = bundler.bundle(self.config.static_items.config.entry.clone().into())?;
let result = bundler
.bundle(self.config.static_items.config.entry.clone().into())
.convert_err()?;
let result = result
.into_iter()
@ -121,7 +126,8 @@ impl Task for BundleTask {
Ok((k, output))
})
})
.collect::<Result<_, _>>()?;
.collect::<Result<_, _>>()
.convert_err()?;
Ok(result)
}));
@ -132,52 +138,30 @@ impl Task for BundleTask {
};
if let Some(s) = err.downcast_ref::<String>() {
bail!("panic detected: {}", s);
return Err(napi::Error::new(
Status::GenericFailure,
format!("panic detected: {}", s),
));
}
bail!("panic detected")
Err(napi::Error::new(
Status::GenericFailure,
format!("panic detected"),
))
}
fn complete(
self,
mut cx: TaskContext,
result: Result<Self::Output, Self::Error>,
) -> JsResult<Self::JsEvent> {
match result {
Ok(v) => Ok(neon_serde::to_value(&mut cx, &v)?.upcast()),
Err(err) => cx.throw_error(format!("{:?}", err)),
}
fn resolve(&self, env: &mut Env, output: Self::Output) -> napi::Result<Self::JsValue> {
serialize(env, &output)?.coerce_to_object()
}
}
pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let c: Arc<Compiler>;
let this = cx.this();
{
let guard = cx.lock();
let compiler = this.borrow(&guard);
c = compiler.clone();
}
#[js_function(1)]
pub(crate) fn bundle(cx: CallContext) -> napi::Result<JsObject> {
let c: Arc<Compiler> = get_compiler(&cx);
let undefined = cx.undefined();
let static_items: StaticConfigItem = cx.get_deserialized(0)?;
let opt = cx.argument::<JsObject>(0)?;
let callback = cx.argument::<JsFunction>(1)?;
let static_items: StaticConfigItem = neon_serde::from_value(&mut cx, opt.upcast())?;
let loader = opt
.get(&mut cx, "loader")?
.downcast::<JsFunction>()
.map(|f| {
let handler = EventHandler::new(&mut cx, undefined, f);
//
Box::new(spack::loaders::neon::NeonLoader {
swc: c.clone(),
handler,
}) as Box<dyn Load>
})
.unwrap_or_else(|_| {
Box::new(spack::loaders::swc::SwcLoader::new(
let loader = Box::new(spack::loaders::swc::SwcLoader::new(
c.clone(),
static_items
.config
@ -185,21 +169,16 @@ pub(crate) fn bundle(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
.as_ref()
.cloned()
.unwrap_or_else(|| {
serde_json::from_value(serde_json::Value::Object(Default::default()))
.unwrap()
serde_json::from_value(serde_json::Value::Object(Default::default())).unwrap()
}),
))
});
));
BundleTask {
cx.env.spawn(BundleTask {
swc: c.clone(),
config: ConfigItem {
loader,
resolver: Box::new(NodeResolver::new()) as Box<_>,
static_items,
},
}
.schedule(callback);
Ok(cx.undefined().upcast())
})
}


@ -1,24 +1,56 @@
#![recursion_limit = "2048"]
extern crate neon;
extern crate neon_serde;
extern crate path_clean;
extern crate serde;
extern crate swc;
#[macro_use]
extern crate napi;
#[macro_use]
extern crate napi_derive;
#[cfg(all(unix, not(target_env = "musl")))]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
#[cfg(windows)]
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
use anyhow::Error;
use backtrace::Backtrace;
use neon::prelude::*;
use napi::{CallContext, Env, JsFunction, JsObject, JsUndefined, Module};
use napi_serde::serialize;
use std::{env, panic::set_hook, sync::Arc};
use swc::{Compiler, TransformOutput};
use swc_common::{self, errors::Handler, FilePathMapping, SourceMap};
use swc_common::{
self,
errors::{ColorConfig, Handler},
sync::Lazy,
FilePathMapping, SourceMap,
};
mod bundle;
mod napi_serde;
mod parse;
mod print;
mod transform;
mod util;
fn init(_cx: MethodContext<JsUndefined>) -> NeonResult<ArcCompiler> {
// #[cfg(all(unix, not(target_env = "musl")))]
// #[global_allocator]
// static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
static COMPILER: Lazy<Arc<Compiler>> = Lazy::new(|| {
let cm = Arc::new(SourceMap::new(FilePathMapping::empty()));
let handler = Arc::new(Handler::with_tty_emitter(
ColorConfig::Always,
true,
false,
Some(cm.clone()),
));
Arc::new(Compiler::new(cm.clone(), handler))
});
register_module!(swc, init);
fn init(m: &mut Module) -> napi::Result<()> {
if cfg!(debug_assertions) || env::var("SWC_DEBUG").unwrap_or_else(|_| String::new()) == "1" {
set_hook(Box::new(|_panic_info| {
let backtrace = Backtrace::new();
@ -26,85 +58,43 @@ fn init(_cx: MethodContext<JsUndefined>) -> NeonResult<ArcCompiler> {
}));
}
let cm = Arc::new(SourceMap::new(FilePathMapping::empty()));
m.create_named_method("define", define_compiler_class)?;
let handler = Arc::new(Handler::with_tty_emitter(
swc_common::errors::ColorConfig::Always,
true,
false,
Some(cm.clone()),
));
m.create_named_method("transform", transform::transform)?;
m.create_named_method("transformSync", transform::transform_sync)?;
m.create_named_method("transformFile", transform::transform_file)?;
m.create_named_method("transformFileSync", transform::transform_file_sync)?;
let c = Compiler::new(cm.clone(), handler);
m.create_named_method("parse", parse::parse)?;
m.create_named_method("parseSync", parse::parse_sync)?;
m.create_named_method("parseFile", parse::parse_file)?;
m.create_named_method("parseFileSync", parse::parse_file_sync)?;
Ok(Arc::new(c))
m.create_named_method("print", print::print)?;
m.create_named_method("printSync", print::print_sync)?;
m.create_named_method("bundle", bundle::bundle)?;
Ok(())
}
pub fn complete_output<'a>(
mut cx: impl Context<'a>,
result: Result<TransformOutput, Error>,
) -> JsResult<'a, JsValue> {
match result {
Ok(output) => Ok(neon_serde::to_value(&mut cx, &output)?),
Err(err) => cx.throw_error(format!("{:?}", err)),
fn get_compiler(_ctx: &CallContext) -> Arc<Compiler> {
COMPILER.clone()
}
#[js_function]
fn define_compiler_class(ctx: CallContext) -> napi::Result<JsFunction> {
ctx.env.define_class("Compiler", construct_compiler, vec![])
}
#[js_function]
fn construct_compiler(ctx: CallContext<JsObject>) -> napi::Result<JsUndefined> {
// TODO: Assign swc::Compiler
ctx.env.get_undefined()
}
pub fn complete_output(env: &Env, output: TransformOutput) -> napi::Result<JsObject> {
serialize(&env, &output)?.coerce_to_object()
}
pub type ArcCompiler = Arc<Compiler>;
declare_types! {
pub class JsCompiler for ArcCompiler {
init(cx) {
init(cx)
}
method transform(cx) {
transform::transform(cx)
}
method transformSync(cx) {
transform::transform_sync(cx)
}
method transformFile(cx) {
transform::transform_file(cx)
}
method transformFileSync(cx) {
transform::transform_file_sync(cx)
}
method parse(cx) {
parse::parse(cx)
}
method parseSync(cx) {
parse::parse_sync(cx)
}
method parseFile(cx) {
parse::parse_file(cx)
}
method parseFileSync(cx) {
parse::parse_file_sync(cx)
}
method print(cx) {
print::print(cx)
}
method printSync(cx) {
print::print_sync(cx)
}
method bundle(cx) {
bundle::bundle(cx)
}
}
}
register_module!(mut cx, {
cx.export_class::<JsCompiler>("Compiler")?;
Ok(())
});


@ -0,0 +1,56 @@
//! Serde for napi.
//!
//! This will be extracted as a standalone crate in the future.
pub use self::ser::serialize;
use std::{fmt, fmt::Display};
mod ser;
#[derive(Debug)]
pub(crate) enum Error {
Normal(anyhow::Error),
Napi(napi::Error),
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::Normal(v) => Display::fmt(v, f),
Error::Napi(v) => Display::fmt(&v.reason, f),
}
}
}
impl serde::ser::Error for Error {
fn custom<T: Display>(msg: T) -> Self {
anyhow::Error::msg(msg.to_string()).into()
}
}
impl serde::de::Error for Error {
fn custom<T: Display>(msg: T) -> Self {
anyhow::Error::msg(msg.to_string()).into()
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::Normal(v) => v.source(),
Error::Napi(_) => None,
}
}
}
impl From<anyhow::Error> for Error {
fn from(e: anyhow::Error) -> Self {
Self::Normal(e)
}
}
impl From<napi::Error> for Error {
fn from(e: napi::Error) -> Self {
Self::Napi(e)
}
}
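For orientation, a minimal sketch of how this serializer is consumed by the bindings in this commit, mirroring the `complete_output` helper in lib.rs. It assumes the napi 0.4 API used throughout the diff; the `Diagnostics` struct and `diagnostics_to_js` function are hypothetical examples, not part of the commit:

```rust
use serde::Serialize;

// Hypothetical payload; any serde-serializable type works the same way.
#[derive(Serialize)]
struct Diagnostics {
    filename: String,
    warnings: u32,
}

// Mirrors `complete_output` above: serialize into a JsUnknown, then
// coerce it to a JsObject for the JS caller.
fn diagnostics_to_js(env: &napi::Env) -> napi::Result<napi::JsObject> {
    let value = Diagnostics {
        filename: "a.js".into(),
        warnings: 0,
    };
    crate::napi_serde::serialize(env, &value)?.coerce_to_object()
}
```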


@ -0,0 +1,403 @@
use super::Error;
use napi::{Env, JsObject, JsUnknown, Status};
use serde::{
ser::{
SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple,
SerializeTupleStruct, SerializeTupleVariant,
},
Serialize, Serializer,
};
pub fn serialize<T>(env: &Env, node: &T) -> napi::Result<napi::JsUnknown>
where
T: Serialize,
{
let s = Ser { env };
match node.serialize(s) {
Ok(v) => Ok(v),
Err(err) => match err {
Error::Normal(v) => Err(napi::Error::new(Status::GenericFailure, format!("{:?}", v))),
Error::Napi(err) => Err(err),
},
}
}
struct Ser<'env> {
env: &'env Env,
}
#[doc(hidden)]
struct ArraySerializer<'env> {
env: &'env Env,
array: JsObject,
}
#[doc(hidden)]
struct TupleVariantSerializer<'env> {
outter_object: JsObject,
inner: ArraySerializer<'env>,
}
#[doc(hidden)]
struct MapSerializer<'env> {
env: &'env Env,
object: JsObject,
key_holder: JsObject,
}
#[doc(hidden)]
struct StructSerializer<'env> {
env: &'env Env,
object: JsObject,
}
#[doc(hidden)]
struct StructVariantSerializer<'env> {
outer_object: JsObject,
inner: StructSerializer<'env>,
}
impl<'env> Serializer for Ser<'env> {
type Ok = JsUnknown;
type Error = Error;
type SerializeSeq = ArraySerializer<'env>;
type SerializeTuple = ArraySerializer<'env>;
type SerializeTupleStruct = ArraySerializer<'env>;
type SerializeTupleVariant = TupleVariantSerializer<'env>;
type SerializeMap = MapSerializer<'env>;
type SerializeStruct = StructSerializer<'env>;
type SerializeStructVariant = StructVariantSerializer<'env>;
fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
Ok(self.env.get_boolean(v)?.into_unknown()?)
}
fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_int32(v as _)?.into_unknown()?)
}
fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_int32(v as _)?.into_unknown()?)
}
fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_int32(v)?.into_unknown()?)
}
fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_int64(v)?.into_unknown()?)
}
fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_uint32(v as _)?.into_unknown()?)
}
fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_uint32(v as _)?.into_unknown()?)
}
fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_uint32(v as _)?.into_unknown()?)
}
fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_int64(v as _)?.into_unknown()?)
}
fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_double(v as _)?.into_unknown()?)
}
fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_double(v as _)?.into_unknown()?)
}
fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
Ok(self
.env
.create_string_from_std(v.to_string())?
.into_unknown()?)
}
fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
Ok(self.env.create_string(v)?.into_unknown()?)
}
fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok, Self::Error> {
Ok(self
.env
.create_buffer_with_data(v.to_vec())?
.into_unknown()?)
}
fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
Ok(self.env.get_null()?.into_unknown()?)
}
fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
where
T: serde::Serialize,
{
value.serialize(self)
}
fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
Ok(self.env.get_null()?.into_unknown()?)
}
fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
Ok(self.env.get_null()?.into_unknown()?)
}
fn serialize_unit_variant(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
) -> Result<Self::Ok, Self::Error> {
self.serialize_str(variant)
}
fn serialize_newtype_struct<T: ?Sized>(
self,
_name: &'static str,
value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: serde::Serialize,
{
value.serialize(self)
}
fn serialize_newtype_variant<T: ?Sized>(
self,
_name: &'static str,
_variant_index: u32,
variant: &'static str,
value: &T,
) -> Result<Self::Ok, Self::Error>
where
T: serde::Serialize,
{
let mut obj = self.env.create_object()?;
let value = serialize(&self.env, &value)?;
obj.set_named_property(variant, value)?;
Ok(obj.into_unknown()?)
}
fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
Ok(ArraySerializer {
env: self.env,
array: self.env.create_array_with_length(len.unwrap_or(0))?,
})
}
fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
Ok(ArraySerializer {
env: self.env,
array: self.env.create_array_with_length(len)?,
})
}
fn serialize_tuple_struct(
self,
_name: &'static str,
len: usize,
) -> Result<Self::SerializeTupleStruct, Self::Error> {
Ok(ArraySerializer {
env: self.env,
array: self.env.create_array_with_length(len)?,
})
}
fn serialize_tuple_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant, Self::Error> {
Ok(TupleVariantSerializer {
outter_object: self.env.create_object()?,
inner: ArraySerializer {
env: self.env,
array: self.env.create_array_with_length(len)?,
},
})
}
fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
Ok(MapSerializer {
env: self.env,
object: self.env.create_object()?,
key_holder: self.env.create_object()?,
})
}
fn serialize_struct(
self,
_name: &'static str,
_len: usize,
) -> Result<Self::SerializeStruct, Self::Error> {
Ok(StructSerializer {
env: self.env,
object: self.env.create_object()?,
})
}
fn serialize_struct_variant(
self,
_name: &'static str,
_variant_index: u32,
_variant: &'static str,
_len: usize,
) -> Result<Self::SerializeStructVariant, Self::Error> {
Ok(StructVariantSerializer {
outer_object: self.env.create_object()?,
inner: StructSerializer {
env: self.env,
object: self.env.create_object()?,
},
})
}
}
impl SerializeSeq for ArraySerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
let value = serialize(self.env, &value)?;
let cur_len = self.array.get_array_length()?;
self.array.set_index(cur_len as _, value)?;
Ok(())
}
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.array.into_unknown()?)
}
}
impl SerializeTuple for ArraySerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<Self::Ok, Self::Error> {
SerializeSeq::end(self)
}
}
impl SerializeTupleStruct for ArraySerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
SerializeSeq::serialize_element(self, value)
}
fn end(self) -> Result<Self::Ok, Self::Error> {
SerializeSeq::end(self)
}
}
impl SerializeTupleVariant for TupleVariantSerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
SerializeSeq::serialize_element(&mut self.inner, value)
}
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.outter_object.into_unknown()?)
}
}
impl SerializeMap for MapSerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
let key = serialize(self.env, &key)?;
self.key_holder.set_named_property("key", key)?;
Ok(())
}
fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
let key = self.key_holder.get_named_property("key")?;
let value = serialize(self.env, &value)?;
self.object.set_property(key, value)?;
Ok(())
}
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.object.into_unknown()?)
}
}
impl SerializeStruct for StructSerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_field<T: ?Sized>(
&mut self,
key: &'static str,
value: &T,
) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
let value = serialize(self.env, &value)?;
self.object.set_named_property(key, value)?;
Ok(())
}
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.object.into_unknown()?)
}
}
impl SerializeStructVariant for StructVariantSerializer<'_> {
type Ok = JsUnknown;
type Error = Error;
fn serialize_field<T: ?Sized>(
&mut self,
key: &'static str,
value: &T,
) -> Result<(), Self::Error>
where
T: serde::Serialize,
{
SerializeStruct::serialize_field(&mut self.inner, key, value)?;
Ok(())
}
fn end(self) -> Result<Self::Ok, Self::Error> {
Ok(self.outer_object.into_unknown()?)
}
}


@ -1,6 +1,9 @@
use crate::JsCompiler;
use anyhow::{Context as _, Error};
use neon::prelude::*;
use crate::{
get_compiler,
util::{CtxtExt, MapErr},
};
use anyhow::Context as _;
use napi::{CallContext, Env, JsObject, JsString, Task};
use std::{
path::{Path, PathBuf},
sync::Arc,
@ -23,116 +26,93 @@ pub struct ParseFileTask {
pub options: ParseOptions,
}
pub fn complete_parse<'a>(
mut cx: impl Context<'a>,
result: Result<Program, Error>,
c: &Compiler,
) -> JsResult<'a, JsValue> {
c.run(|| match result {
Ok(program) => Ok(cx
.string(serde_json::to_string(&program).expect("failed to serialize Program"))
.upcast()),
Err(err) => cx.throw_error(format!("{:?}", err)),
})
pub fn complete_parse<'a>(env: &Env, program: Program, _c: &Compiler) -> napi::Result<JsString> {
let s = serde_json::to_string(&program)
.context("failed to serialize Program")
.convert_err()?;
env.create_string_from_std(s)
}
impl Task for ParseTask {
type Output = Program;
type Error = Error;
type JsEvent = JsValue;
type JsValue = JsString;
fn perform(&self) -> Result<Self::Output, Self::Error> {
self.c.run(|| {
self.c.parse_js(
fn compute(&mut self) -> napi::Result<Self::Output> {
let program = self
.c
.parse_js(
self.fm.clone(),
self.options.target,
self.options.syntax,
self.options.is_module,
self.options.comments,
)
})
.convert_err()?;
Ok(program)
}
fn complete(
self,
cx: TaskContext,
result: Result<Self::Output, Self::Error>,
) -> JsResult<Self::JsEvent> {
complete_parse(cx, result, &self.c)
fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result<Self::JsValue> {
complete_parse(env, result, &self.c)
}
}
impl Task for ParseFileTask {
type Output = Program;
type Error = Error;
type JsEvent = JsValue;
type JsValue = JsString;
fn perform(&self) -> Result<Self::Output, Self::Error> {
fn compute(&mut self) -> napi::Result<Self::Output> {
self.c.run(|| {
let fm = self
.c
.cm
.load_file(&self.path)
.context("failed to read module")?;
.context("failed to read module")
.convert_err()?;
self.c.parse_js(
self.c
.parse_js(
fm,
self.options.target,
self.options.syntax,
self.options.is_module,
self.options.comments,
)
.convert_err()
})
}
fn complete(
self,
cx: TaskContext,
result: Result<Self::Output, Self::Error>,
) -> JsResult<Self::JsEvent> {
complete_parse(cx, result, &self.c)
fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result<Self::JsValue> {
complete_parse(env, result, &self.c)
}
}
pub fn parse(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let src = cx.argument::<JsString>(0)?;
let options_arg = cx.argument::<JsValue>(1)?;
let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?;
let callback = cx.argument::<JsFunction>(2)?;
#[js_function(2)]
pub fn parse(ctx: CallContext) -> napi::Result<JsObject> {
let c = get_compiler(&ctx);
let src = ctx.get::<JsString>(0)?;
let options: ParseOptions = ctx.get_deserialized(1)?;
let this = cx.this();
{
let guard = cx.lock();
let c = this.borrow(&guard);
let fm =
c.cm.new_source_file(FileName::Anon, src.as_str()?.to_string());
let fm = c.cm.new_source_file(FileName::Anon, src.value());
ParseTask {
ctx.env.spawn(ParseTask {
c: c.clone(),
fm,
options,
}
.schedule(callback);
};
Ok(cx.undefined().upcast())
})
}
pub fn parse_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let c;
let this = cx.this();
{
let guard = cx.lock();
let compiler = this.borrow(&guard);
c = compiler.clone();
}
#[js_function(2)]
pub fn parse_sync(cx: CallContext) -> napi::Result<JsString> {
let c = get_compiler(&cx);
c.run(|| {
let src = cx.argument::<JsString>(0)?;
let options_arg = cx.argument::<JsValue>(1)?;
let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?;
let src = cx.get::<JsString>(0)?.as_str()?.to_string();
let options: ParseOptions = cx.get_deserialized(1)?;
let program = {
let fm = c.cm.new_source_file(FileName::Anon, src.value());
let fm = c.cm.new_source_file(FileName::Anon, src);
c.parse_js(
fm,
options.target,
@ -140,28 +120,22 @@ pub fn parse_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
options.is_module,
options.comments,
)
};
}
.convert_err()?;
complete_parse(cx, program, &c)
complete_parse(&cx.env, program, &c)
})
}
pub fn parse_file_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let c;
let this = cx.this();
{
let guard = cx.lock();
let compiler = this.borrow(&guard);
c = compiler.clone();
}
c.run(|| {
let path = cx.argument::<JsString>(0)?;
let options_arg = cx.argument::<JsValue>(1)?;
let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?;
#[js_function(2)]
pub fn parse_file_sync(cx: CallContext) -> napi::Result<JsString> {
let c = get_compiler(&cx);
let path = cx.get::<JsString>(0)?;
let options: ParseOptions = cx.get_deserialized(1)?;
let program = {
let fm =
c.cm.load_file(Path::new(&path.value()))
c.cm.load_file(Path::new(path.as_str()?))
.expect("failed to read program file");
c.parse_js(
@ -171,30 +145,17 @@ pub fn parse_file_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
options.is_module,
options.comments,
)
};
}
.convert_err()?;
complete_parse(cx, program, &c)
})
complete_parse(cx.env, program, &c)
}
pub fn parse_file(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let path = cx.argument::<JsString>(0)?;
let options_arg = cx.argument::<JsValue>(1)?;
let options: ParseOptions = neon_serde::from_value(&mut cx, options_arg)?;
let callback = cx.argument::<JsFunction>(2)?;
#[js_function(2)]
pub fn parse_file(cx: CallContext) -> napi::Result<JsObject> {
let c = get_compiler(&cx);
let path = PathBuf::from(cx.get::<JsString>(0)?.as_str()?);
let options: ParseOptions = cx.get_deserialized(1)?;
let this = cx.this();
{
let guard = cx.lock();
let c = this.borrow(&guard);
ParseFileTask {
c: c.clone(),
path: path.value().into(),
options,
}
.schedule(callback);
};
Ok(cx.undefined().upcast())
cx.env.spawn(ParseFileTask { c, path, options })
}


@ -1,6 +1,8 @@
use crate::{complete_output, JsCompiler};
use anyhow::Error;
use neon::prelude::*;
use crate::{
complete_output, get_compiler,
util::{CtxtExt, MapErr},
};
use napi::{CallContext, Env, JsObject, JsString, Task};
use std::sync::Arc;
use swc::{
config::{Options, SourceMapsConfig},
@ -18,11 +20,11 @@ pub struct PrintTask {
impl Task for PrintTask {
type Output = TransformOutput;
type Error = Error;
type JsEvent = JsValue;
fn perform(&self) -> Result<Self::Output, Self::Error> {
self.c.run(|| {
self.c.print(
type JsValue = JsObject;
fn compute(&mut self) -> napi::Result<Self::Output> {
self.c
.print(
&self.program,
self.options
.source_maps
@ -36,59 +38,39 @@ impl Task for PrintTask {
.minify
.unwrap_or(false),
)
})
.convert_err()
}
fn complete(
self,
cx: TaskContext,
result: Result<Self::Output, Self::Error>,
) -> JsResult<Self::JsEvent> {
complete_output(cx, result)
fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result<Self::JsValue> {
complete_output(env, result)
}
}
pub fn print(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let program = cx.argument::<JsString>(0)?;
#[js_function(2)]
pub fn print(cx: CallContext) -> napi::Result<JsObject> {
let c = get_compiler(&cx);
let program = cx.get::<JsString>(0)?;
let program: Program =
serde_json::from_str(&program.value()).expect("failed to deserialize Program");
serde_json::from_str(program.as_str()?).expect("failed to deserialize Program");
let options = cx.argument::<JsValue>(1)?;
let options: Options = neon_serde::from_value(&mut cx, options)?;
let options: Options = cx.get_deserialized(1)?;
let callback = cx.argument::<JsFunction>(2)?;
let this = cx.this();
{
let guard = cx.lock();
let c = this.borrow(&guard);
PrintTask {
cx.env.spawn(PrintTask {
c: c.clone(),
program,
options,
}
.schedule(callback)
})
}
Ok(cx.undefined().upcast())
}
#[js_function(2)]
pub fn print_sync(cx: CallContext) -> napi::Result<JsObject> {
let c = get_compiler(&cx);
pub fn print_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
let c;
let this = cx.this();
{
let guard = cx.lock();
let compiler = this.borrow(&guard);
c = compiler.clone();
}
c.run(|| {
let program = cx.argument::<JsString>(0)?;
let program = cx.get::<JsString>(0)?;
let program: Program =
serde_json::from_str(&program.value()).expect("failed to deserialize Program");
serde_json::from_str(&program.as_str()?).expect("failed to deserialize Program");
let options = cx.argument::<JsValue>(1)?;
let options: Options = neon_serde::from_value(&mut cx, options)?;
let options: Options = cx.get_deserialized(1)?;
let result = {
c.print(
@ -100,7 +82,7 @@ pub fn print_sync(mut cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
None,
options.config.unwrap_or_default().minify.unwrap_or(false),
)
};
complete_output(cx, result)
})
}
.convert_err()?;
complete_output(cx.env, result)
}


@ -1,6 +1,9 @@
use crate::{complete_output, JsCompiler};
use crate::{
complete_output, get_compiler,
util::{CtxtExt, MapErr},
};
use anyhow::{Context as _, Error};
use neon::prelude::*;
use napi::{CallContext, Env, JsBoolean, JsObject, JsString, Task};
use path_clean::clean;
use std::{
path::{Path, PathBuf},
@ -29,11 +32,11 @@ pub struct TransformTask {
impl Task for TransformTask {
type Output = TransformOutput;
type Error = Error;
type JsEvent = JsValue;
type JsValue = JsObject;
fn perform(&self) -> Result<Self::Output, Self::Error> {
self.c.run(|| match self.input {
fn compute(&mut self) -> napi::Result<Self::Output> {
self.c
.run(|| match self.input {
Input::Program(ref s) => {
let program: Program =
serde_json::from_str(&s).expect("failed to deserialize Program");
@ -48,76 +51,56 @@ impl Task for TransformTask {
Input::Source(ref s) => self.c.process_js_file(s.clone(), &self.options),
})
.convert_err()
}
fn complete(
self,
cx: TaskContext,
result: Result<Self::Output, Self::Error>,
) -> JsResult<Self::JsEvent> {
complete_output(cx, result)
fn resolve(&self, env: &mut Env, result: Self::Output) -> napi::Result<Self::JsValue> {
complete_output(env, result)
}
}
/// returns `compiler, (src / path), options, plugin, callback`
pub fn schedule_transform<F>(mut cx: MethodContext<JsCompiler>, op: F) -> JsResult<JsValue>
pub fn schedule_transform<F>(cx: CallContext, op: F) -> napi::Result<JsObject>
where
F: FnOnce(&Arc<Compiler>, String, bool, Options) -> TransformTask,
{
let c;
let this = cx.this();
{
let guard = cx.lock();
c = this.borrow(&guard).clone();
};
let c = get_compiler(&cx);
let s = cx.argument::<JsString>(0)?.value();
let is_module = cx.argument::<JsBoolean>(1)?;
let options_arg = cx.argument::<JsValue>(2)?;
let s = cx.get::<JsString>(0)?.as_str()?.to_string();
let is_module = cx.get::<JsBoolean>(1)?;
let options: Options = cx.get_deserialized(2)?;
let options: Options = neon_serde::from_value(&mut cx, options_arg)?;
let callback = cx.argument::<JsFunction>(3)?;
let task = op(&c, s, is_module.get_value()?, options);
let task = op(&c, s, is_module.value(), options);
task.schedule(callback);
Ok(cx.undefined().upcast())
cx.env.spawn(task)
}
pub fn exec_transform<F>(mut cx: MethodContext<JsCompiler>, op: F) -> JsResult<JsValue>
pub fn exec_transform<F>(cx: CallContext, op: F) -> napi::Result<JsObject>
where
F: FnOnce(&Compiler, String, &Options) -> Result<Arc<SourceFile>, Error>,
{
let s = cx.argument::<JsString>(0)?;
let is_module = cx.argument::<JsBoolean>(1)?;
let options: Options = match cx.argument_opt(2) {
Some(v) => neon_serde::from_value(&mut cx, v)?,
None => {
let obj = cx.empty_object().upcast();
neon_serde::from_value(&mut cx, obj)?
}
};
let c = get_compiler(&cx);
let this = cx.this();
let output = {
let guard = cx.lock();
let c = this.borrow(&guard);
c.run(|| {
if is_module.value() {
let s = cx.get::<JsString>(0)?;
let is_module = cx.get::<JsBoolean>(1)?;
let options: Options = cx.get_deserialized(2)?;
let output = c.run(|| -> napi::Result<_> {
if is_module.get_value()? {
let program: Program =
serde_json::from_str(&s.value()).expect("failed to deserialize Program");
c.process_js(program, &options)
serde_json::from_str(s.as_str()?).expect("failed to deserialize Program");
c.process_js(program, &options).convert_err()
} else {
let fm = op(&c, s.value(), &options).expect("failed to create fm");
c.process_js_file(fm, &options)
let fm = op(&c, s.as_str()?.to_string(), &options).expect("failed to create fm");
c.process_js_file(fm, &options).convert_err()
}
})
};
})?;
complete_output(cx, output)
complete_output(cx.env, output)
}
pub fn transform(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
#[js_function(4)]
pub fn transform(cx: CallContext) -> napi::Result<JsObject> {
schedule_transform(cx, |c, src, is_module, options| {
let input = if is_module {
Input::Program(src)
@ -140,7 +123,8 @@ pub fn transform(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
})
}
pub fn transform_sync(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
#[js_function(4)]
pub fn transform_sync(cx: CallContext) -> napi::Result<JsObject> {
exec_transform(cx, |c, src, options| {
Ok(c.cm.new_source_file(
if options.filename.is_empty() {
@ -153,7 +137,8 @@ pub fn transform_sync(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
})
}
pub fn transform_file(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
#[js_function(4)]
pub fn transform_file(cx: CallContext) -> napi::Result<JsObject> {
schedule_transform(cx, |c, path, _, options| {
let path = clean(&path);
@ -165,7 +150,8 @@ pub fn transform_file(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
})
}
pub fn transform_file_sync(cx: MethodContext<JsCompiler>) -> JsResult<JsValue> {
#[js_function(4)]
pub fn transform_file_sync(cx: CallContext) -> napi::Result<JsObject> {
exec_transform(cx, |c, path, _| {
Ok(c.cm
.load_file(Path::new(&path))

native/src/util.rs (new file, 36 lines)

@ -0,0 +1,36 @@
use anyhow::Context;
use napi::{CallContext, JsBuffer, NapiValue, Status};
use serde::de::DeserializeOwned;
pub trait MapErr<T>: Into<Result<T, anyhow::Error>> {
fn convert_err(self) -> napi::Result<T> {
self.into()
.map_err(|err| napi::Error::new(Status::GenericFailure, format!("{:?}", err)))
}
}
impl<T> MapErr<T> for Result<T, anyhow::Error> {}
pub trait CtxtExt {
/// Currently this uses JsBuffer
fn get_deserialized<T>(&self, index: usize) -> napi::Result<T>
where
T: DeserializeOwned;
}
impl<V> CtxtExt for CallContext<'_, V>
where
V: NapiValue,
{
fn get_deserialized<T>(&self, index: usize) -> napi::Result<T>
where
T: DeserializeOwned,
{
let buffer = self.get::<JsBuffer>(index)?;
let v = serde_json::from_slice(&buffer)
.with_context(|| format!("Argument at `{}` is not JsBuffer", index))
.convert_err()?;
Ok(v)
}
}
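A brief sketch of how `MapErr::convert_err` and `CtxtExt::get_deserialized` are meant to be used from a `#[js_function]` handler, under the same napi 0.4 assumptions as the rest of this diff; `ExampleOptions`, `do_work`, and `example` are illustrative names only:

```rust
use anyhow::anyhow;
use napi::{CallContext, JsUndefined};
use serde::Deserialize;

use crate::util::{CtxtExt, MapErr};

// Hypothetical options payload, passed from JS as a JSON-encoded buffer.
#[derive(Deserialize)]
struct ExampleOptions {
    minify: bool,
}

// Any fallible work that returns anyhow::Error can be surfaced to JS.
fn do_work(opts: &ExampleOptions) -> Result<(), anyhow::Error> {
    if opts.minify {
        Ok(())
    } else {
        Err(anyhow!("nothing to do"))
    }
}

#[js_function(1)]
fn example(cx: CallContext) -> napi::Result<JsUndefined> {
    // Argument 0 is read and deserialized via the JsBuffer path above.
    let opts: ExampleOptions = cx.get_deserialized(0)?;
    // anyhow::Error -> napi::Error via MapErr.
    do_work(&opts).convert_err()?;
    cx.env.get_undefined()
}
```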


@ -1,7 +1,10 @@
const swc = require("../../..");
const path = require('path');
const os = require('os');
if (os.platform() !== 'win32') {
it('should handle multiple entries on same level', async () => {
const result = await swc.bundle({
entry: {
@ -32,3 +35,8 @@ it('should handle multiple entries on different level', async () => {
expect(result.web).toBeTruthy();
expect(result.web.code).toContain(`../common`);
});
} else {
it('should not be error on windows', async () => {
expect('I hate windows').toBeTruthy()
});
}


@ -1,490 +0,0 @@
/*!
* node-swc: lib/extensions.js
*/
var eol = require("os").EOL,
fs = require("fs"),
pkg = require("../../package.json"),
mkdir = require("mkdirp"),
path = require("path"),
defaultBinaryDir = path.join(__dirname, "..", "..", "native"),
trueCasePathSync = require("true-case-path");
/**
* Get the human readable name of the Platform that is running
*
* @param {string} platform - An OS platform to match, or null to fallback to
* the current process platform
* @return {Object} The name of the platform if matched, false otherwise
*
* @api public
*/
function getHumanPlatform(platform) {
switch (platform || process.platform) {
case "darwin":
return "OS X";
case "freebsd":
return "FreeBSD";
case "linux":
return "Linux";
case "linux_musl":
return "Linux/musl";
case "win32":
return "Windows";
default:
return false;
}
}
/**
* Provides a more readable version of the architecture
*
* @param {string} arch - An instruction architecture name to match, or null to
* lookup the current process architecture
* @return {Object} The value of the process architecture, or false if unknown
*
* @api public
*/
function getHumanArchitecture(arch) {
switch (arch || process.arch) {
case "ia32":
return "32-bit";
case "x86":
return "32-bit";
case "x64":
return "64-bit";
default:
return false;
}
}
/**
* Get the friendly name of the Node environment being run
*
* @param {Object} abi - A Node Application Binary Interface value, or null to
* fallback to the current Node ABI
* @return {Object} Returns a string name of the Node environment or false if
* unmatched
*
* @api public
*/
function getHumanNodeVersion(abi) {
switch (parseInt(abi || process.versions.modules, 10)) {
case 11:
return "Node 0.10.x";
case 14:
return "Node 0.12.x";
case 42:
return "io.js 1.x";
case 43:
return "io.js 1.1.x";
case 44:
return "io.js 2.x";
case 45:
return "io.js 3.x";
case 46:
return "Node.js 4.x";
case 47:
return "Node.js 5.x";
case 48:
return "Node.js 6.x";
case 49:
return "Electron 1.3.x";
case 50:
return "Electron 1.4.x";
case 51:
return "Node.js 7.x";
case 53:
return "Electron 1.6.x";
case 57:
return "Node.js 8.x";
case 59:
return "Node.js 9.x";
case 64:
return "Node.js 10.x";
case 67:
return "Node.js 11.x";
default:
return false;
}
}
/**
* Get a human readable description of where node-swc is running to support
* user error reporting when something goes wrong
*
* @param {string} env - The name of the native bindings that is to be parsed
* @return {string} A description of what os, architecture, and Node version
* that is being run
*
* @api public
*/
function getHumanEnvironment(env) {
var binding = env.replace(/_binding\.node$/, ""),
parts = binding.split("-"),
platform = getHumanPlatform(parts[0]),
arch = getHumanArchitecture(parts[1]),
runtime = getHumanNodeVersion(parts[2]);
if (parts.length !== 3) {
return "Unknown environment (" + binding + ")";
}
if (!platform) {
platform = "Unsupported platform (" + parts[0] + ")";
}
if (!arch) {
arch = "Unsupported architecture (" + parts[1] + ")";
}
if (!runtime) {
runtime = "Unsupported runtime (" + parts[2] + ")";
}
return [platform, arch, "with", runtime].join(" ");
}
/**
* Get the value of the binaries under the default path
*
* @return {Array} The currently installed node-swc bindings
*
* @api public
*/
function getInstalledBinaries() {
return fs.readdirSync(getBinaryDir());
}
/**
* Check that an environment matches the whitelisted values or the current
* environment if no parameters are passed
*
* @param {string} platform - The name of the OS platform(darwin, win32, etc...)
* @param {string} arch - The instruction set architecture of the Node environment
* @param {string} abi - The Node Application Binary Interface
* @return {Boolean} True, if node-swc supports the current platform, false otherwise
*
* @api public
*/
function isSupportedEnvironment(platform, arch, abi) {
return (
false !== getHumanPlatform(platform) &&
false !== getHumanArchitecture(arch) &&
false !== getHumanNodeVersion(abi)
);
}
/**
* Get the value of a CLI argument
*
* @param {String} name
* @param {Array} args
* @api private
*/
function getArgument(name, args) {
var flags = args || process.argv.slice(2),
index = flags.lastIndexOf(name);
if (index === -1 || index + 1 >= flags.length) {
return null;
}
return flags[index + 1];
}
/**
* Get binary name.
* If environment variable SWC_BINARY_NAME,
* .npmrc variable swc_binary_name or
* process argument --binary-name is provided,
* return it as is, otherwise make default binary
* name: {platform}-{arch}-{v8 version}.node
*
* @api public
*/
function getBinaryName() {
var binaryName,
variant,
platform = process.platform;
if (getArgument("--swc-binary-name")) {
binaryName = getArgument("--swc-binary-name");
} else if (process.env.SWC_BINARY_NAME) {
binaryName = process.env.SWC_BINARY_NAME;
} else if (process.env.npm_config_swc_binary_name) {
binaryName = process.env.npm_config_swc_binary_name;
} else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryName) {
binaryName = pkg.nodeSwcConfig.binaryName;
} else {
variant = getPlatformVariant();
if (variant) {
platform += "_" + variant;
}
binaryName = [
platform,
"-",
process.arch,
"-",
process.versions.modules,
].join("");
}
return [binaryName, ".node"].join("");
}
/**
* Determine the URL to fetch binary file from.
* By default fetch from the node-swc distribution
* site on GitHub.
*
* The default URL can be overriden using
* the environment variable SWC_BINARY_SITE,
* .npmrc variable swc_binary_site or
* or a command line option --swc-binary-site:
*
* node scripts/install.js --swc-binary-site http://example.com/
*
* The URL should to the mirror of the repository
* laid out as follows:
*
* SWC_BINARY_SITE/
*
* v3.0.0
* v3.0.0/freebsd-x64-14_binding.node
* ....
* v3.0.0
* v3.0.0/freebsd-ia32-11_binding.node
* v3.0.0/freebsd-x64-42_binding.node
* ... etc. for all supported versions and platforms
*
* @api public
*/
function getBinaryUrl() {
var site =
getArgument("--swc-binary-site") ||
process.env.SWC_BINARY_SITE ||
process.env.npm_config_swc_binary_site ||
(pkg.nodeSwcConfig && pkg.nodeSwcConfig.binarySite) ||
"https://github.com/swc-project/node-swc/releases/download";
return [site, "v" + pkg.version, getBinaryName()].join("/");
}
/**
* Get binary dir.
* If environment variable SWC_BINARY_DIR,
* .npmrc variable SWC_BINARY_DIR or
* process argument --swc-binary-dir is provided,
* select it by appending binary name, otherwise
* use default binary dir.
* Once the primary selection is made, check if
* callers wants to throw if file not exists before
* returning.
*
* @api public
*/
function getBinaryDir() {
var binaryDir;
if (getArgument("--swc-binary-dir")) {
binaryDir = getArgument("--swc-binary-dir");
} else if (process.env.SWC_BINARY_DIR) {
binaryDir = process.env.SWC_BINARY_DIR;
} else if (process.env.npm_config_SWC_BINARY_DIR) {
binaryDir = process.env.npm_config_SWC_BINARY_DIR;
} else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryDir) {
binaryDir = pkg.nodeSwcConfig.binaryDir;
} else {
binaryDir = defaultBinaryDir;
}
return binaryDir;
}
/**
* Get binary path.
* If environment variable SWC_BINARY_PATH,
* .npmrc variable SWC_BINARY_PATH or
* process argument --swc-binary-path is provided,
* select it by appending binary name, otherwise
* make default binary path using binary name.
* Once the primary selection is made, check if
* callers wants to throw if file not exists before
* returning.
*
* @api public
*/
function getBinaryPath() {
var binaryPath;
if (getArgument("--swc-binary-path")) {
binaryPath = getArgument("--swc-binary-path");
} else if (process.env.SWC_BINARY_PATH) {
binaryPath = process.env.SWC_BINARY_PATH;
} else if (process.env.npm_config_SWC_BINARY_PATH) {
binaryPath = process.env.npm_config_SWC_BINARY_PATH;
} else if (pkg.nodeSwcConfig && pkg.nodeSwcConfig.binaryPath) {
binaryPath = pkg.nodeSwcConfig.binaryPath;
} else {
binaryPath = path.join(getBinaryDir(), "index.node");
}
if (process.versions.modules < 46) {
return binaryPath;
}
try {
return trueCasePathSync(binaryPath) || binaryPath;
} catch (e) {
return binaryPath;
}
}
/**
* An array of paths suitable for use as a local disk cache of the binding.
*
* @return {[]String} an array of paths
* @api public
*/
function getCachePathCandidates() {
return [
process.env.npm_config_swc_binary_cache,
process.env.npm_config_cache,
].filter(function (_) {
return _;
});
}
/**
* The most suitable location for caching the binding on disk.
*
* Given the candidates directories provided by `getCachePathCandidates()` this
* returns the first writable directory. By treating the candidate directories
* as a prioritised list this method is deterministic, assuming no change to the
* local environment.
*
* @return {String} directory to cache binding
* @api public
*/
function getBinaryCachePath() {
var i,
cachePath,
cachePathCandidates = getCachePathCandidates();
for (i = 0; i < cachePathCandidates.length; i++) {
cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version);
try {
mkdir.sync(cachePath);
return cachePath;
} catch (e) {
// Directory is not writable, try another
}
}
return "";
}
/**
* The cached binding
*
* Check the candidates directories provided by `getCachePathCandidates()` for
* the binding file, if it exists. By treating the candidate directories
* as a prioritised list this method is deterministic, assuming no change to the
* local environment.
*
* @return {String} path to cached binary
* @api public
*/
function getCachedBinary() {
var i,
cachePath,
cacheBinary,
cachePathCandidates = getCachePathCandidates(),
binaryName = getBinaryName();
for (i = 0; i < cachePathCandidates.length; i++) {
cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version);
cacheBinary = path.join(cachePath, binaryName);
if (fs.existsSync(cacheBinary)) {
return cacheBinary;
}
}
return "";
}
/**
* Does the supplied binary path exist?
*
* @param {String} binaryPath
* @api public
*/
function hasBinary(binaryPath) {
return fs.existsSync(binaryPath);
}
/**
* Get Swc version information
*
* @api public
*/
function getVersionInfo(binding) {
return [
["node-swc", pkg.version, "(Wrapper)", "[JavaScript]"].join("\t"),
// ['libswc ', binding.libswcVersion(), '(Swc Compiler)', '[C/C++]'].join('\t'),
].join(eol);
}
/**
* Gets the platform variant, currently either an empty string or 'musl' for Linux/musl platforms.
*
* @api public
*/
function getPlatformVariant() {
var contents = "";
if (process.platform !== "linux") {
return "";
}
try {
contents = fs.readFileSync(process.execPath);
// Buffer.indexOf was added in Node v1.5.0, so cast to a string for old Node.
// Delay contents.toString() because it's expensive.
if (!contents.indexOf) {
contents = contents.toString();
}
if (contents.indexOf("libc.musl-x86_64.so.1") !== -1) {
return "musl";
}
} catch (err) {} // eslint-disable-line no-empty
return "";
}
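// In practice this returns "musl" on musl-based distros such as Alpine, where
// the node binary links against libc.musl-x86_64.so.1, and "" everywhere else,
// including glibc-based Linux.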
module.exports.hasBinary = hasBinary;
module.exports.getBinaryUrl = getBinaryUrl;
module.exports.getBinaryName = getBinaryName;
module.exports.getBinaryDir = getBinaryDir;
module.exports.getBinaryPath = getBinaryPath;
module.exports.getBinaryCachePath = getBinaryCachePath;
module.exports.getCachedBinary = getCachedBinary;
module.exports.getCachePathCandidates = getCachePathCandidates;
module.exports.getVersionInfo = getVersionInfo;
module.exports.getHumanEnvironment = getHumanEnvironment;
module.exports.getInstalledBinaries = getInstalledBinaries;
module.exports.isSupportedEnvironment = isSupportedEnvironment;

View File

@ -8,10 +8,45 @@ import {
Program,
} from "./types";
export * from "./types";
import { wrapNativeSuper } from "./util";
import { BundleInput, compileBundleOptions } from "./spack";
import { loadBinding } from '@node-rs/helper';
import { platform } from 'os';
const native = require("./native");
let bindings: any
let linuxError = null
try {
bindings = loadBinding(__dirname, 'swc')
} catch (e) {
const platformName = platform()
try {
bindings = require(`@swc/core-${platformName}`)
} catch (e) {
if (platformName !== 'linux') {
throw new TypeError('Not compatible with your platform. Error message: ' + e.message)
} else {
linuxError = e
}
}
}
if (!bindings) {
try {
require.resolve('@swc/core-linux-musl')
} catch (e) {
throw new TypeError(
`Could not load @swc/core-linux. You may need to add @swc/core-linux-musl to the optionalDependencies of your project.`,
)
}
try {
bindings = require('@swc/core-linux-musl')
} catch (e) {
throw new TypeError(
`Linux glibc version load error: ${linuxError.message}; Linux musl version load error: ${e.message}`,
)
}
}
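// Load order, as implemented above: the prebuilt binding shipped alongside
// this package (resolved by @node-rs/helper), then the platform-specific
// optional dependency @swc/core-<platform>, and on Linux a final fallback to
// @swc/core-linux-musl for musl-based systems.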
/**
* Version of the swc binding.
@ -28,26 +63,18 @@ export function plugins(ps: Plugin[]): Plugin {
};
}
export class Compiler extends wrapNativeSuper(native.Compiler) {
public constructor() {
super();
}
export class Compiler {
parse(
src: string,
options: ParseOptions & { isModule: false }
): Promise<Script>;
parse(src: string, options?: ParseOptions): Promise<Module>;
parse(src: string, options?: ParseOptions): Promise<Program> {
async parse(src: string, options?: ParseOptions): Promise<Program> {
options = options || { syntax: "ecmascript" };
options.syntax = options.syntax || "ecmascript";
return new Promise((resolve, reject) => {
super.parse(src, options, (err: any, value: string) => {
if (!!err) return reject(err);
resolve(JSON.parse(value));
});
});
const res = await bindings.parse(src, toBuffer(options));
return JSON.parse(res);
}
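// Note: options are handed to the native binding as a JSON buffer (see
// toBuffer at the bottom of this file) and the AST comes back as a JSON
// string, hence the JSON.parse above; the other methods below follow the
// same pattern.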
parseSync(src: string, options: ParseOptions & { isModule: false }): Script;
@ -55,7 +82,8 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
parseSync(src: string, options?: ParseOptions): Program {
options = options || { syntax: "ecmascript" };
options.syntax = options.syntax || "ecmascript";
return JSON.parse(super.parseSync(src, options));
return JSON.parse(bindings.parseSync(src, toBuffer(options)));
}
parseFile(
@ -67,12 +95,9 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
options = options || { syntax: "ecmascript" };
options.syntax = options.syntax || "ecmascript";
return new Promise((resolve, reject) => {
super.parseFile(path, options, (err: any, value: string) => {
if (!!err) return reject(err);
resolve(JSON.parse(value));
});
});
const res = bindings.parseFile(path, toBuffer(options));
return JSON.parse(res);
}
parseFileSync(
@ -83,22 +108,18 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
parseFileSync(path: string, options?: ParseOptions): Program {
options = options || { syntax: "ecmascript" };
options.syntax = options.syntax || "ecmascript";
return JSON.parse(super.parseFileSync(path, options));
return JSON.parse(bindings.parseFileSync(path, toBuffer(options)));
}
/**
* Note: this method should be invoked on the compiler instance used
* for `parse()` / `parseSync()`.
*/
print(m: Program, options?: Options): Promise<Output> {
async print(m: Program, options?: Options): Promise<Output> {
options = options || {};
return new Promise((resolve, reject) => {
super.print(JSON.stringify(m), options, (err: any, value: Output) => {
if (!!err) return reject(err);
resolve(value);
});
});
return bindings.print(JSON.stringify(m), toBuffer(options))
}
/**
@ -108,7 +129,7 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
printSync(m: Program, options?: Options): Output {
options = options || {};
return super.printSync(JSON.stringify(m), options);
return bindings.printSync(JSON.stringify(m), toBuffer(options));
}
async transform(src: string | Program, options?: Options): Promise<Output> {
@ -131,17 +152,7 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
return this.transform(plugin(m), options);
}
return new Promise((resolve, reject) => {
super.transform(
isModule ? JSON.stringify(src) : src,
isModule,
options,
(err: any, value: Output) => {
if (!!err) return reject(err);
resolve(value);
}
);
});
return bindings.transform(isModule ? JSON.stringify(src) : src, isModule, toBuffer(options))
}
transformSync(src: string | Program, options?: Options): Output {
@ -162,11 +173,11 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
return this.transformSync(plugin(m), options);
}
return super.transformSync(
return bindings.transformSync(
isModule ? JSON.stringify(src) : src,
isModule,
options
);
toBuffer(options),
)
}
async transformFile(path: string, options?: Options): Promise<Output> {
@ -185,17 +196,7 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
return this.transform(plugin(m), options);
}
return new Promise((resolve, reject) => {
super.transformFile(
path,
/* isModule */ false,
options,
(err: any, value: Output) => {
if (!!err) return reject(err);
resolve(value);
}
);
});
return bindings.transformFile(path, false, toBuffer(options))
}
transformFileSync(path: string, options?: Options): Output {
@ -214,7 +215,7 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
return this.transformSync(plugin(m), options);
}
return super.transformFileSync(path, /* isModule */ false, options);
return bindings.transformFileSync(path, /* isModule */ false, toBuffer(options));
}
@ -235,14 +236,9 @@ export class Compiler extends wrapNativeSuper(native.Compiler) {
return obj;
}
return new Promise((resolve, reject) => {
super.bundle({
return bindings.bundle(toBuffer({
...opts,
}, (err: any, value: any) => {
if (err) return reject(err);
resolve(value)
})
});
}));
}
}
@ -338,3 +334,7 @@ export const DEFAULT_EXTENSIONS = Object.freeze([
".ts",
".tsx"
]);
function toBuffer(t: any): Buffer {
return Buffer.from(JSON.stringify(t))
}
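// A minimal usage sketch (for illustration; the jsc options shown are
// assumptions, not part of this file):
//
//   import { Compiler } from "@swc/core";
//   const out = new Compiler().transformSync("let x = 1;", {
//     jsc: { target: "es5" },
//   });
//   console.log(out.code);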

View File

@ -1,28 +1,47 @@
{
"name": "@swc/core",
"version": "1.2.22",
"version": "1.2.22-alpha.1",
"description": "Super-fast alternative for babel",
"homepage": "https://swc-project.github.io",
"main": "./index.js",
"author": "강동윤 <kdy1997.dev@gmail.com>",
"license": "MIT",
"keywords": [
"swc",
"spack",
"babel",
"typescript",
"rust",
"webpack",
"tsc"
],
"engines": {
"node": ">=8.0.0"
"node": ">=8.9.0"
},
"repository": {
"type": "git",
"url": "git+https://github.com/swc-project/swc.git"
},
"bugs": {
"url": "https://github.com/swc-project/swc/issues"
},
"os": [
"darwin",
"linux",
"win32"
],
"cpu": [
"x64"
],
"dependencies": {
"mkdirp": "^0.5.1",
"node-fetch": "^2.6.0",
"progress": "^2.0.3",
"true-case-path": "^1.0.3"
"@node-rs/helper": "^0.3.1"
},
"types": "./lib/index.d.ts",
"scripts": {
"prepublish": "tsc -d",
"install": "node scripts/install.js || (npm install neon-cli && neon build --release)",
"build": "tsc -d && neon build --release"
"prepublishOnly": "node ./scripts/npm/prepublish.js",
"build": "tsc -d && cargo build -p node --release && cd ./native && napi build --platform --release .",
"build:dev": "tsc -d && cargo build -p node && cd ./native && napi build --platform .",
"test": "jest node-swc/__tests__"
},
"devDependencies": {
"@babel/core": "^7.2.2",
@ -39,7 +58,8 @@
"browserslist": "^4.12.0",
"jest": "^23.6.0",
"lodash": "^4.17.11",
"neon-cli": "^0.3.3",
"napi-rs": "^0.2.6",
"progress": "^2.0.3",
"source-map": "^0.7.3",
"sourcemap-validator": "^1.1.1",
"typescript": "^3.9.7"
@ -47,5 +67,10 @@
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/swc"
},
"optionalDependencies": {
"@swc/core-darwin": "^1.2.22",
"@swc/core-linux": "^1.2.22",
"@swc/core-win32": "^1.2.22"
}
}

View File

@ -10,7 +10,7 @@ export CARGO_TARGET_DIR=".COVERAGE_DIR"
git clone https://github.com/swc-project/ts-parser-test-ref.git ecmascript/parser/tests/typescript/tsc || true
(cd ecmascript/parser/tests/typescript/tsc && git pull)
cargo test --all --all-features --exclude node-swc --exclude wasm --exclude spack
cargo test --all --all-features --exclude node-swc --exclude wasm
(cd spack && cargo test)
zip -0 ccov.zip `find $CARGO_TARGET_DIR \( -name "swc*.gc*" -o -name 'spack*.gc*' -o -name 'ast_node*.gc*' -o -name 'enum_kind*.gc*' -o -name 'string-enum*.gc*' -o -name 'from_variant*.gc*' \) -print`;

View File

@ -1,226 +0,0 @@
/*!
* Copied from node-sass: scripts/install.js
*/
const fetch = require("node-fetch");
var fs = require("fs"),
os = require("os"),
eol = require("os").EOL,
mkdir = require("mkdirp"),
path = require("path"),
swc = require("../node-swc/src/extensions"),
ProgressBar = require("progress"),
env = process.env;
/**
* Download file, if succeeds save, if not delete
*
* @param {String} url
* @param {String} dest
* @param {Function} cb
* @api private
*/
function download(url, dest, cb) {
var reportError = function(err) {
var timeoutMessge;
if (err.code === "ETIMEDOUT") {
if (err.connect === true) {
// timeout is hit while your client is attempting to establish a connection to a remote machine
timeoutMessge = "Timed out attempting to establish a remote connection";
} else {
timeoutMessge = "Timed out whilst downloading the prebuilt binary";
// occurs any time the server is too slow to send back a part of the response
}
}
cb(
[
'Cannot download "',
url,
'": ',
eol,
eol,
typeof err.message === "string" ? err.message : err,
eol,
eol,
timeoutMessge ? timeoutMessge + eol + eol : timeoutMessge,
"Hint: If github.com is not accessible in your location",
eol,
" try setting a proxy via HTTP_PROXY, e.g. ",
eol,
eol,
" export HTTP_PROXY=http://example.com:1234",
eol,
eol,
"or configure npm proxy via",
eol,
eol,
" npm config set proxy http://example.com:8080"
].join("")
);
process.exit(1);
};
console.log("Downloading binary from", url);
try {
fetch(url).then(function(resp) {
if (200 <= resp.status && resp.status < 300) {
const length = +resp.headers.get("Content-Length");
var progress = new ProgressBar(":bar", { total: length });
progress.render();
// The `progress` option is true by default. However, if it has not
// been explicitly set, it's `undefined`, which is treated as `true`
// as far as npm is concerned.
if (true) {
resp.body
.on("data", function(chunk) {
progress.tick(chunk.length);
})
.on("end", function() {
progress.terminate();
});
}
resp.body.on("error", cb);
resp.body.pipe(
fs.createWriteStream(dest).on("finish", function() {
console.log("Download complete");
})
);
} else {
reportError(
["HTTP error", resp.status, resp.statusText].join(" ")
);
}
}, reportError);
} catch (err) {
cb(err);
}
}
/**
* Check and download binary
*
* @api private
*/
function checkAndDownloadBinary() {
if (process.env.SKIP_SWC_BINARY_DOWNLOAD_FOR_CI) {
console.log("Skipping downloading binaries on CI builds");
return;
}
if (process.env.npm_config_build_from_source) {
console.info("Building swc from source code");
process.exit(1);
return;
}
var cachedBinary = swc.getCachedBinary(),
cachePath = swc.getBinaryCachePath(),
binaryPath = swc.getBinaryPath();
if (swc.hasBinary(binaryPath)) {
console.log("node-swc build", "Binary found at", binaryPath);
return;
}
try {
mkdir.sync(path.dirname(binaryPath));
} catch (err) {
console.error("Unable to save binary", path.dirname(binaryPath), ":", err);
return;
}
if (cachedBinary) {
console.log("Cached binary found at", cachedBinary);
fs.createReadStream(cachedBinary).pipe(fs.createWriteStream(binaryPath));
return;
}
download(swc.getBinaryUrl(), binaryPath, function(err) {
if (err) {
console.error(err);
return;
}
console.log("Binary saved to", binaryPath);
cachedBinary = path.join(cachePath, swc.getBinaryName());
if (cachePath) {
console.log("Caching binary to", cachedBinary);
try {
mkdir.sync(path.dirname(cachedBinary));
fs.createReadStream(binaryPath)
.pipe(fs.createWriteStream(cachedBinary))
.on("error", function(err) {
console.log("Failed to cache binary:", err);
});
} catch (err) {
console.log("Failed to cache binary:", err);
}
}
});
}
var BANNER =
"\u001B[96mThank you for using swc (\u001B[94m https://github.com/swc-project/swc \u001B[96m): super-fast javascript and typescript compiler \u001B[0m\n\n" +
"\u001B[96mThe project needs your help! Please consider supporting swc on Open Collective: \u001B[0m\n" +
"\u001B[96m>\u001B[94m https://opencollective.com/swc \u001B[0m\n";
var ADBLOCK = is(env.ADBLOCK);
var COLOR = is(env.npm_config_color);
var DISABLE_OPENCOLLECTIVE = is(env.DISABLE_OPENCOLLECTIVE);
var SILENT =
["silent", "error", "warn"].indexOf(env.npm_config_loglevel) !== -1;
var MINUTE = 60 * 1000;
// if your CI is not detected here, you could open a PR adding its env variable
var CI = ["BUILD_NUMBER", "CI", "CONTINUOUS_INTEGRATION", "RUN_ID"].some(
function(it) {
return is(env[it]);
}
);
function is(it) {
return !!it && it !== "0" && it !== "false";
}
function isBannerRequired() {
if (ADBLOCK || CI || DISABLE_OPENCOLLECTIVE || SILENT) return false;
var file = path.join(os.tmpdir(), "core-js-banners");
var banners = [];
try {
var DELTA = Date.now() - fs.statSync(file).mtime;
if (DELTA >= 0 && DELTA < MINUTE * 3) {
banners = JSON.parse(fs.readFileSync(file, "utf8"));
if (banners.indexOf(BANNER) !== -1) return false;
}
} catch (error) {
banners = [];
}
try {
banners.push(BANNER);
fs.writeFileSync(file, JSON.stringify(banners), "utf8");
} catch (error) {
/* empty */
}
return true;
}
function showBanner() {
// eslint-disable-next-line no-console,no-control-regex
console.log(COLOR ? BANNER : BANNER.replace(/\u001B\[\d+m/g, ""));
}
if (isBannerRequired()) showBanner();
/**
* If binary does not exist, download it
*/
checkAndDownloadBinary();

View File

@ -0,0 +1,21 @@
{
"name": "@swc/core-darwin",
"description": "The macOS 64-bit binary for @swc/core.",
"repository": "https://github.com/swc-project/swc",
"license": "MIT",
"version": "0.0.0",
"main": "swc.node",
"files": [
"swc.node"
],
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@ -0,0 +1,21 @@
{
"name": "@swc/core-linux",
"description": "The Linux 64-bit binary for @swc/core.",
"repository": "https://github.com/swc-project/swc",
"license": "MIT",
"version": "0.0.0",
"main": "swc.node",
"files": [
"swc.node"
],
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@ -0,0 +1,21 @@
{
"name": "@swc/core-win32",
"description": "The Windows 64-bit binary for @swc/core.",
"repository": "https://github.com/swc-project/swc",
"license": "MIT",
"version": "0.0.0",
"main": "swc.node",
"files": [
"swc.node"
],
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"os": [
"win32"
],
"cpu": [
"x64"
]
}

1
scripts/npm/platforms.js Normal file
View File

@ -0,0 +1 @@
module.exports = ['darwin', 'linux', 'win32']
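// These are os.platform() values; each maps to a scripts/npm/core-<platform>
// package that is published as @swc/core-<platform>.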

34
scripts/npm/prepublish.js Normal file
View File

@ -0,0 +1,34 @@
const { execSync } = require('child_process')
const fs = require('fs')
const path = require('path')
const { version } = require('../../package.json')
const platforms = require('./platforms')
const updatePackageJson = require('./update-package')
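// What this script does: point each @swc/core-<platform> optional dependency
// in the root package.json at the current version (^version), stamp that
// version into every per-platform package.json, copy the freshly built
// native/node.<platform>.node binding into each package as swc.node, and
// finally `npm publish` each one.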
updatePackageJson(path.join(__dirname, '..', '..', 'package.json'), {
optionalDependencies: platforms.reduce((acc, cur) => {
acc[`@swc/core-${cur}`] = `^${version}`
return acc
}, {}),
})
for (const name of platforms) {
const pkgDir = path.join(__dirname, `core-${name}`)
updatePackageJson(path.join(pkgDir, 'package.json'), {
version: `${version}`,
})
}
for (const name of platforms) {
const pkgDir = path.join(__dirname, `core-${name}`)
const bindingFile = fs.readFileSync(path.join(__dirname, '..', '..', 'native', `node.${name}.node`))
fs.writeFileSync(path.join(pkgDir, `swc.node`), bindingFile);
execSync('npm publish', {
cwd: pkgDir,
env: process.env,
stdio: 'inherit',
})
}

View File

@ -0,0 +1,6 @@
const fs = require('fs')
module.exports = function updatePackageJson(path, partial) {
const old = require(path)
fs.writeFileSync(path, JSON.stringify({ ...old, ...partial }, null, 2))
}
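// Example (mirroring prepublish.js; pkgJsonPath is a hypothetical absolute path):
//   updatePackageJson(pkgJsonPath, { version: '1.2.22' })
// rewrites the file with the merged fields and 2-space indentation.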

8
scripts/setup-env.sh Normal file
View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -eu
NODE_PLATFORM_NAME=$(node -e "console.log(require('os').platform())")
(cd scripts/npm/core-$NODE_PLATFORM_NAME && npm link)
npm link @swc/core-$NODE_PLATFORM_NAME
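# For example, on macOS NODE_PLATFORM_NAME is "darwin", so this registers
# scripts/npm/core-darwin as a global link and then links it into the current
# project as @swc/core-darwin, so the locally built binding can be resolved.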

View File

@ -1,23 +0,0 @@
/**
* Determine the proxy settings configured by npm
*
* It's possible to configure npm to use a proxy different
* from the system defined proxy. This can be done via the
* `npm config` CLI or the `.npmrc` config file.
*
* If a proxy has been configured in this way we must
* tell request explicitly to use it.
*
* Otherwise we can trust request to do the right thing.
*
* @return {String} the proxy configured by npm or an empty string
* @api private
*/
module.exports = function() {
return (
process.env.npm_config_https_proxy ||
process.env.npm_config_proxy ||
process.env.npm_config_http_proxy ||
""
);
};

View File

@ -1,16 +0,0 @@
var pkg = require("../../package.json");
/**
* A custom user agent used for binary downloads.
*
* @api private
*/
module.exports = function() {
return [
"node/",
process.version,
" ",
"node-swc-installer/",
pkg.version
].join("");
};

View File

@ -1,16 +1,28 @@
[package]
name = "spack"
version = "0.0.0"
authors = ["강동윤 <kdy1997.dev@gmail.com>"]
license = "Apache-2.0/MIT"
repository = "https://github.com/swc-project/swc.git"
documentation = "https://swc-project.github.io/rustdoc/swc/"
description = "Speedy web compiler"
documentation = "https://swc-project.github.io/rustdoc/swc/"
edition = "2018"
license = "Apache-2.0/MIT"
name = "spack"
publish = false
repository = "https://github.com/swc-project/swc.git"
version = "0.0.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1"
dashmap = "3"
is-macro = "0.1.8"
log = "0.4.8"
napi = "=0.4.13"
once_cell = "1"
regex = "1"
serde = {version = "1", features = ["derive"]}
serde_json = "1"
string_enum = {version = "0.3", path = "../macros/string_enum"}
swc = {path = "../"}
swc_atoms = {path = "../atoms"}
swc_bundler = {path = "../bundler"}
swc_common = {path = "../common"}
@ -20,22 +32,10 @@ swc_ecma_parser = { path = "../ecmascript/parser" }
swc_ecma_transforms = {path = "../ecmascript/transforms"}
swc_ecma_utils = {path = "../ecmascript/utils"}
swc_ecma_visit = {path = "../ecmascript/visit"}
swc = { path = "../" }
string_enum = { version = "0.3", path ="../macros/string_enum" }
regex = "1"
once_cell = "1"
serde = { version = "1", features = ["derive"] }
anyhow = "1"
dashmap = "3"
log = "0.4.8"
is-macro = "0.1.8"
neon = { version = "0.4.0", features = ["event-handler-api"] }
neon-sys = "0.4.0"
serde_json = "1"
[dev-dependencies]
pretty_assertions = "0.6.1"
testing = { path = "../testing" }
walkdir = "2.3.1"
pretty_env_logger = "0.3"
tempfile = "3"
testing = {path = "../testing"}
walkdir = "2.3.1"

View File

@ -1,2 +1 @@
pub mod neon;
pub mod swc;

View File

@ -1,75 +0,0 @@
use anyhow::{Context as _, Error};
use neon::prelude::*;
use std::sync::{mpsc::channel, Arc};
use swc_bundler::Load;
use swc_common::{FileName, SourceFile};
use swc_ecma_ast::{Module, Program};
/// Loader provided by user.
pub struct NeonLoader {
pub swc: Arc<swc::Compiler>,
pub handler: EventHandler,
}
impl Load for NeonLoader {
fn load(&self, name: &FileName) -> Result<(Arc<SourceFile>, Module), Error> {
let path = name.to_string();
let (tx, rx) = channel();
self.handler.schedule_with(move |cx, _value, f| {
//
let this = cx.undefined();
let path = cx.string(path);
let res = f.call(cx, this, vec![path]);
let res = match res {
Ok(v) => v,
Err(err) => {
let _ = tx.send(Err(Error::msg(format!(
"failed to invoke js loader: {}",
err
))));
return;
}
};
if let Ok(code) = res.downcast::<JsString>() {
let s = code.value();
match tx.send(Ok(s)) {
Ok(_) => return,
Err(err) => {
let _ = tx.send(Err(Error::msg(format!(
"failed to send result back: {}",
err
))));
return;
}
}
}
let _ = tx.send(Err(Error::msg("failed to invoke js loader")));
});
let code = rx
.recv()
.context("failed to receive output from js loader")?;
let code = code?;
let fm = self.swc.cm.new_source_file(name.clone(), code);
let config = self.swc.config_for_file(
&swc::config::Options {
swcrc: true,
..Default::default()
},
&fm.name,
)?;
let module = self
.swc
.parse_js(fm.clone(), config.target, config.syntax, true, true)?;
let module = match module {
Program::Module(v) => v,
Program::Script(_) => unreachable!("script"),
};
Ok((fm, module))
}
}