ECMAScript parser (#1)

- Parser and lexer for the latest ecma spec https://tc39.github.io/ecma262

  - Lexer is currently very inefficient

 - Use https://github.com/tc39/test262-parser-tests/ for testing.

 - Implement proc-macro based ast folder and assert_eq_ignore_span! based on it.

 - Some utilities for proc macro at /macros/common
This commit is contained in:
강동윤 2018-01-12 16:53:06 +09:00 committed by GitHub
parent 7e2ef3b518
commit e949c40517
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
93 changed files with 10422 additions and 670 deletions

4
.cargo/config Normal file
View File

@ -0,0 +1,4 @@
[build]
rustflags = ["--cfg", "procmacro2_semver_exempt"]
rustdocflags = ["--cfg", "procmacro2_semver_exempt", "--document-private-items"]

3
.gitignore vendored
View File

@ -1,4 +1,5 @@
/target/
**/*.bk
Cargo.lock
core
*.log

3
.gitmodules vendored Normal file
View File

@ -0,0 +1,3 @@
[submodule "ecmascript/test262-parser-tests"]
path = ecmascript/parser/tests/test262-parser
url = git@github.com:tc39/test262-parser-tests.git

View File

@ -3,3 +3,4 @@ reorder_imports_in_group = true
reorder_imported_names = true
wrap_comments = true
write_mode = "replace"
same_line_attributes = false

View File

@ -5,6 +5,12 @@ rust:
- nightly
cache:
- cargo
git:
submodules: false
before_install:
- sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules
- git submodule update --init --recursive
before_script:
- |
pip install 'travis-cargo<0.2' --user &&
@ -12,8 +18,9 @@ before_script:
script:
- |
export TRAVIS_CARGO_NIGHTLY_FEATURE="" &&
travis-cargo build &&
travis-cargo test &&
travis-cargo build -- --all &&
travis-cargo test -- --all &&
export RUSTDOC="./scripts/rustdoc.sh" &&
travis-cargo doc
after_success:
- travis-cargo doc-upload

756
Cargo.lock generated Normal file
View File

@ -0,0 +1,756 @@
[[package]]
name = "aho-corasick"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ast_node"
version = "0.1.0"
dependencies = [
"pmutil 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"swc_common 0.1.0",
"swc_macros 0.1.0",
"swc_macros_common 0.1.0",
"syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "backtrace"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "backtrace-sys"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bitflags"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cc"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "chrono"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "crossbeam"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "debug_unreachable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "either"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "enum_kind"
version = "0.1.0"
dependencies = [
"pmutil 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"swc_macros_common 0.1.0",
"syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure_derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "fnv"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "fuchsia-zircon-sys"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "isatty"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
"termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazy_static"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazy_static"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
version = "0.2.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "log"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "log"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "memchr"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-integer"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-iter"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "parser_macros"
version = "0.1.0"
dependencies = [
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"swc_macros_common 0.1.0",
"syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "phf_generator"
version = "0.7.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "phf_shared"
version = "0.7.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "pmutil"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "proc-macro2"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "quote"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "redox_syscall"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "redox_termios"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
version = "0.1.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex-syntax"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc-demangle"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "serde"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "siphasher"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slog"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slog-async"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"take_mut 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "slog-envlogger"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-async 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-scope 4.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-stdlog 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-term 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "slog-scope"
version = "4.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "slog-stdlog"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-scope 4.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "slog-term"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"isatty 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "string_cache"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "string_cache_codegen"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "string_cache_shared"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "swc"
version = "0.1.0"
dependencies = [
"swc_atoms 0.1.0",
"swc_common 0.1.0",
"swc_ecmascript 0.1.0",
"swc_macros 0.1.0",
]
[[package]]
name = "swc_atoms"
version = "0.1.0"
dependencies = [
"string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "swc_common"
version = "0.1.0"
dependencies = [
"either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "swc_ecma_ast"
version = "0.1.0"
dependencies = [
"swc_atoms 0.1.0",
"swc_common 0.1.0",
"swc_macros 0.1.0",
]
[[package]]
name = "swc_ecma_parser"
version = "0.1.0"
dependencies = [
"either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parser_macros 0.1.0",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"swc_atoms 0.1.0",
"swc_common 0.1.0",
"swc_ecma_ast 0.1.0",
"swc_macros 0.1.0",
"testing 0.1.0",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "swc_ecmascript"
version = "0.1.0"
dependencies = [
"swc_ecma_ast 0.1.0",
"swc_ecma_parser 0.1.0",
]
[[package]]
name = "swc_macros"
version = "0.1.0"
dependencies = [
"ast_node 0.1.0",
"enum_kind 0.1.0",
]
[[package]]
name = "swc_macros_common"
version = "0.1.0"
dependencies = [
"pmutil 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "0.11.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synom"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synstructure"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "take_mut"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "term"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "termion"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "testing"
version = "0.1.0"
dependencies = [
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-envlogger 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slog-term 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"swc_common 0.1.0",
]
[[package]]
name = "thread-id"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread_local"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread_local"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "time"
version = "0.1.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unicode-xid"
version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unreachable"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unreachable"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "utf8-ranges"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "void"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-x86_64-pc-windows-gnu 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
"checksum backtrace 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ebbbf59b1c43eefa8c3ede390fcc36820b4999f7914104015be25025e0d62af2"
"checksum backtrace-sys 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "44585761d6161b0f57afc49482ab6bd067e4edef48c12a152c237eb0203f7661"
"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
"checksum cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "deaf9ec656256bb25b404c51ef50097207b9cbb29c933d31f92cae5a8a0ffee0"
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
"checksum chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c20ebe0b2b08b0aeddba49c609fe7957ba2e33449882cb186a180bc60682fa9"
"checksum crossbeam 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "bd66663db5a988098a89599d4857919b3acf7f61402e61365acfd3919857b9be"
"checksum debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3"
"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
"checksum failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c7cdda555bb90c9bb67a3b670a0f42de8e73f5981524123ad8578aafec8ddb8b"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum isatty 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "8f2a233726c7bb76995cec749d59582e5664823b7245d4970354408f1d79a7a2"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
"checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d"
"checksum libc 0.2.35 (registry+https://github.com/rust-lang/crates.io-index)" = "96264e9b293e95d25bfcbbf8a88ffd1aedc85b754eba8b7d78012f638ba220eb"
"checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
"checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2"
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum num 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "cc4083e14b542ea3eb9b5f33ff48bd373a92d78687e74f4cc0a30caeb754f0ca"
"checksum num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "d1452e8b06e448a07f0e6ebb0bb1d92b8890eea63288c0b627331d53514d0fba"
"checksum num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "7485fcc84f85b4ecd0ea527b14189281cf27d60e583ae65ebc9c088b13dffe01"
"checksum num-traits 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "cacfcab5eb48250ee7d0c7896b51a2c5eec99c1feea5f32025635f5ae4b00070"
"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
"checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
"checksum pmutil 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5483812bd1a336ae14013e7e7afc700a29a11695f8a4768431bab3ed7f9d54d6"
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum proc-macro2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9786e83afb5654ab1b336584548011f252db3c320c0ddba5dc21a1a76f83ca"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"
"checksum rand 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)" = "512870020642bb8c221bf68baa1b2573da814f6ccfe5c9699b1c303047abe9b1"
"checksum redox_syscall 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "0d92eecebad22b767915e4d529f89f28ee96dbbf5a4810d2b844373f136417fd"
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e"
"checksum serde 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)" = "db99f3919e20faa51bb2996057f5031d8685019b5a06139b1ce761da671b8526"
"checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537"
"checksum slog 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6b13b17f4225771f7f15cece704a4e68d3a5f31278ed26367f497133398a18"
"checksum slog-async 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5e319a30c08b004618d5f7ca2f2b1dad7b4623ba7fcb1a12846fc3b01e9eaa10"
"checksum slog-envlogger 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7c6685180086bf58624e92cb3da5d5f013bebd609454926fc8e2ac6345d384b"
"checksum slog-scope 4.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "053344c94c0e2b22da6305efddb698d7c485809427cf40555dc936085f67a9df"
"checksum slog-stdlog 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ac42f8254ae996cc7d640f9410d3b048dcdf8887a10df4d5d4c44966de24c4a8"
"checksum slog-term 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5bb5d9360b2b279b326824b3b4ca2402ead8a8138f0e5ec1900605c861bb6671"
"checksum string_cache 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "413fc7852aeeb5472f1986ef755f561ddf0c789d3d796e65f0b6fe293ecd4ef8"
"checksum string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7"
"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
"checksum syn 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a0dceeafa95292b8a313f611317ae2fcba87dcd9a83e17c0adb4497efe81e7c3"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a761d12e6d8dcb4dcf952a7a89b475e3a9d69e4a69307e01a470977642914bd"
"checksum take_mut 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50b910a1174df4aeb5738e8a0e7253883cf7801de40d094175a5a557e487f4c5"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
"checksum time 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "a15375f1df02096fb3317256ce2cee6a1f42fc84ea5ad5fc8c421cfe40c73098"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b09fb3b6f248ea4cd42c9a65113a847d612e17505d6ebd1f7357ad68a8bf8693"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum winapi-i686-pc-windows-gnu 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ec6667f60c23eca65c561e63a13d81b44234c2e38a6b6c959025ee907ec614cc"
"checksum winapi-x86_64-pc-windows-gnu 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "98f12c52b2630cd05d2c3ffd8e008f7f48252c042b4871c72aed9dc733b96668"

View File

@ -1,6 +1,5 @@
[workspace]
[package]
name = "swc"
version = "0.1.0"
@ -8,5 +7,10 @@ authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_atoms = { path = "./atoms" }
swc_ecmascript = { path = "./ecmascript" }
swc_common = { path = "./common" }
swc_macros = { path = "./macros" }
swc_macros = { path = "./macros" }
[profile.bench]
debug = true

View File

@ -5,46 +5,67 @@ use std::path::Path;
fn main() {
gen(
"js_ident",
"JsIdent",
"js_word",
"JsWord",
&[
// keywords
"await",
"break",
"case",
"catch",
"class",
"const",
"continue",
"debugger",
"default",
"delete",
"do",
"else",
"export",
"extends",
"finally",
"for",
"function",
"if",
"import",
"in",
"instanceof",
"new",
"return",
"super",
"switch",
"this",
"throw",
"try",
"typeof",
"var",
"let",
"const",
"void",
"while",
"with",
"new",
"this",
"super",
"class",
"extends",
"export",
"import",
"yield",
// reserved word on strict mode.
"let",
"static",
"null",
"true",
"false",
"in",
"instanceof",
"typeof",
"void",
"delete",
// not keywords, just for pattern matching
"from",
"static",
"of",
"set",
"get",
"target",
"await",
"async",
"as",
// future reserved words?
"implements",
"interface",
"package",
"private",
"protected",
"public",
],
);
}

View File

@ -1,3 +1,3 @@
extern crate string_cache;
include!(concat!(env!("OUT_DIR"), "/js_ident.rs"));
include!(concat!(env!("OUT_DIR"), "/js_word.rs"));

View File

@ -4,3 +4,6 @@ version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
fnv = "1"
string_cache = "0.6"
either = "1.4"

View File

@ -1,7 +1,14 @@
use EqIgnoreSpan;
use std::fmt::Debug;
use std::hash::Hash;
/// Currently just a marker trait.
///
pub trait AstNode: Debug + EqIgnoreSpan + Hash + Clone {}
///
///
///
/// # Derive
/// This trait can be derived with `#[derive(AstNode)]`.
///
pub trait AstNode: Debug + PartialEq + Clone {}
/// Marker.
impl AstNode for ! {}

58
common/src/compat/mod.rs Normal file
View File

@ -0,0 +1,58 @@
//! Tracking for used html5 features.
use fnv::FnvHashSet;
use std::ops::{Add, AddAssign};
/// Implemented by nodes that can report which `caniuse.com`
/// compatibility features they rely on.
pub trait CanIUse {
    /// Records every feature used by `self` into `features`.
    fn report_used_features(&self, features: &mut UsedFeatures);
}
/// Feature from `caniuse.com`.
/// TODO: Make this enum?
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Feature(&'static str);

impl Feature {
    /// not public api
    ///
    /// Construct features through the `caniuse_feature!` macro instead.
    #[doc(hidden)]
    pub const fn new(s: &'static str) -> Self {
        Feature(s)
    }

    /// Returns the feature's `caniuse.com` identifier.
    pub fn as_str(self) -> &'static str {
        self.0
    }
}
/// Creates a `caniuse::compat::Feature` with given string.
///
/// This is the intended public constructor for `Feature`; the inherent
/// `Feature::new` is `#[doc(hidden)]`.
#[macro_export]
macro_rules! caniuse_feature {
    ($s:expr) => {{
        $crate::compat::Feature::new($s)
    }}
}
/// Collector for features reported during a traversal.
///
/// Backed by a hash set, so duplicate reports are collapsed.
#[derive(Debug, Clone, Default)]
pub struct UsedFeatures {
    // De-duplicated set of reported features.
    feats: FnvHashSet<Feature>,
}

impl UsedFeatures {
    /// Consumes the collector and returns the de-duplicated feature set.
    pub fn finalize(self) -> FnvHashSet<Feature> {
        self.feats
    }
}
impl Add<Feature> for UsedFeatures {
    type Output = Self;

    /// Returns `self` with `rhs` recorded as used.
    fn add(mut self, rhs: Feature) -> Self {
        self.feats.insert(rhs);
        self
    }
}

impl AddAssign<Feature> for UsedFeatures {
    /// Records `rhs` as used; re-adding an already-seen feature is a no-op.
    fn add_assign(&mut self, rhs: Feature) {
        self.feats.insert(rhs);
    }
}

View File

@ -1,88 +0,0 @@
pub trait EqIgnoreSpan: Eq {
fn eq_ignore_span(&self, other: &Self) -> bool;
}
impl<T: ?Sized + Eq> EqIgnoreSpan for T {
default fn eq_ignore_span(&self, other: &Self) -> bool {
*self == *other
}
}
macro_rules! impl_for_eq_ty {
($Type:ty) => {
impl $crate::EqIgnoreSpan for $Type {
fn eq_ignore_span(&self, other: &Self) -> bool { *self == *other }
}
};
($Type:ty,) => {
impl_for_eq_ty!($Type);
};
($Type:ty, $($rest:tt)+) => {
impl_for_eq_ty!($Type);
impl_for_eq_ty!($($rest)*);
};
}
impl_for_eq_ty!(
bool,
u8,
u16,
u32,
u64,
usize,
i8,
i16,
i32,
i64,
isize,
String,
char,
);
impl<T: EqIgnoreSpan> EqIgnoreSpan for Option<T> {
fn eq_ignore_span(&self, other: &Self) -> bool {
match (self.as_ref(), other.as_ref()) {
(Some(l), Some(r)) if l.eq_ignore_span(r) => true,
_ => false,
}
}
}
impl<'a, T: ?Sized + EqIgnoreSpan> EqIgnoreSpan for &'a T {
fn eq_ignore_span(&self, other: &Self) -> bool {
<T as EqIgnoreSpan>::eq_ignore_span(*self, *other)
}
}
impl<T: EqIgnoreSpan> EqIgnoreSpan for [T] {
fn eq_ignore_span(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
for i in 0..self.len() {
if !self[i].eq_ignore_span(&other[i]) {
return false;
}
}
true
}
}
impl<T> EqIgnoreSpan for Box<T>
where
T: ?Sized + EqIgnoreSpan,
{
fn eq_ignore_span(&self, other: &Self) -> bool {
<T as EqIgnoreSpan>::eq_ignore_span(&self, &other)
}
}
impl<T> EqIgnoreSpan for Vec<T>
where
T: EqIgnoreSpan,
{
fn eq_ignore_span(&self, other: &Self) -> bool {
(&self[..]).eq_ignore_span(&other[..])
}
}

80
common/src/fold.rs Normal file
View File

@ -0,0 +1,80 @@
use either::Either;
use string_cache::{Atom, StaticAtomSet};
/// This trait requires `specialization` feature.
pub trait Folder<T> {
fn fold(&mut self, t: T) -> T;
}
/// This trait can be derived with `#[derive(AstNode)]`.
pub trait FoldWith<F> {
fn fold_children(self, f: &mut F) -> Self;
}
impl<T, F> Folder<T> for F
where
    T: FoldWith<F>,
{
    /// Default implementation which folds childrens with `self`.
    ///
    /// Specialized `Folder<T>` impls override this to rewrite `t` itself.
    default fn fold(&mut self, t: T) -> T {
        t.fold_children(self)
    }
}

impl<F> FoldWith<F> for ! {
    /// Unreachable: no value of `!` can exist, so this body never runs.
    fn fold_children(self, _: &mut F) -> Self {
        self
    }
}
impl<T, F> FoldWith<F> for Box<T>
where
    T: FoldWith<F>,
{
    /// Folds the boxed value through `f`, re-boxing the result.
    fn fold_children(self, f: &mut F) -> Self {
        box f.fold(*self)
    }
}

impl<T, F> FoldWith<F> for Vec<T>
where
    T: FoldWith<F>,
{
    /// Folds each element through `f`, preserving order.
    fn fold_children(self, f: &mut F) -> Self {
        self.into_iter().map(|it| f.fold(it)).collect()
    }
}
impl<T, F> FoldWith<F> for Option<T>
where
    T: FoldWith<F>,
{
    /// Folds the inner value, if any; `None` is returned unchanged.
    fn fold_children(self, f: &mut F) -> Self {
        // Delegate through `Folder::fold` (as the `Box` / `Vec` impls do)
        // so a specialized `Folder<T>` impl is still invoked for the inner
        // value. Calling `t.fold_children(f)` directly skipped it.
        self.map(|t| f.fold(t))
    }
}
impl<F> FoldWith<F> for String {
    /// No op.
    fn fold_children(self, _: &mut F) -> Self {
        self
    }
}

impl<F, S: StaticAtomSet> FoldWith<F> for Atom<S> {
    /// No op.
    fn fold_children(self, _: &mut F) -> Self {
        self
    }
}

impl<A, B, F> FoldWith<F> for Either<A, B>
where
    F: Folder<A> + Folder<B>,
{
    /// Folds whichever side is present, preserving the `Left`/`Right` tag.
    fn fold_children(self, f: &mut F) -> Self {
        match self {
            Either::Left(a) => Either::Left(Folder::<A>::fold(f, a)),
            Either::Right(b) => Either::Right(Folder::<B>::fold(f, b)),
        }
    }
}

24
common/src/gen_iter.rs Normal file
View File

@ -0,0 +1,24 @@
use std::ops::{Generator, GeneratorState};
/// Iterator adapter over a generator: each `resume` yields one item.
struct GenIter<G: Generator<Return = ()>>(G);

impl<G> Iterator for GenIter<G>
where
    G: Generator<Return = ()>,
{
    type Item = G::Yield;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Drive the generator one step; a yield becomes `Some(item)`,
        // completion becomes `None`.
        match self.0.resume() {
            GeneratorState::Yielded(item) => Some(item),
            GeneratorState::Complete(()) => None,
        }
    }
}

/// Creates a new iterator from `gen`.
///
/// NOTE(review): resuming a generator after it has completed may panic under
/// the nightly `Generator` API — callers should stop at the first `None`.
pub const fn gen_iter<G>(gen: G) -> impl Iterator<Item = G::Yield>
where
    G: Generator<Return = ()>,
{
    GenIter(gen)
}

View File

@ -1,9 +1,22 @@
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(const_fn)]
#![feature(try_trait)]
#![feature(optin_builtin_traits)]
#![feature(never_type)]
#![feature(specialization)]
#![feature(generator_trait)]
extern crate either;
extern crate fnv;
extern crate string_cache;
pub use self::ast_node::AstNode;
pub use self::eq_ignore_span::EqIgnoreSpan;
pub use self::span::{BytePos, Span};
pub use self::gen_iter::gen_iter;
pub use self::span::{BytePos, Span, Spanned};
pub mod compat;
pub mod fold;
pub mod parser;
mod ast_node;
mod eq_ignore_span;
mod gen_iter;
mod span;

8
common/src/parser/mod.rs Normal file
View File

@ -0,0 +1,8 @@
use BytePos;
pub trait SpannedToken {
type Token;
fn into_triple(self) -> (BytePos, Self::Token, BytePos);
fn from_triple(start: BytePos, t: Self::Token, end: BytePos) -> Self;
}

View File

@ -1,18 +1,35 @@
use std::fmt::{self, Display, Formatter};
use fold::FoldWith;
use std::fmt::{self, Debug, Display, Formatter};
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BytePos(pub u32);
impl Display for BytePos {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
self.0.fmt(f)
Display::fmt(&self.0, f)
}
}
impl Debug for BytePos {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
Debug::fmt(&self.0, f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Span {
/// Inclusive
pub start: BytePos,
/// Inclusive
pub end: BytePos,
}
impl Debug for Span {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
if self.start == BytePos(0) && self.end == BytePos(0) {
write!(f, "_")
} else {
write!(f, "{}..{}", self.start, self.end.0 + 1)
}
}
}
impl Span {
pub const DUMMY: Span = Span {
@ -28,10 +45,22 @@ impl Default for Span {
}
}
impl ::EqIgnoreSpan for Span {
/// always returns true
#[inline]
fn eq_ignore_span(&self, _: &Self) -> bool {
true
pub trait Spanned<T>: Sized {
fn from_unspanned(node: T, span: Span) -> Self;
}
impl<S, T> Spanned<T> for Box<S>
where
S: Spanned<T>,
{
fn from_unspanned(node: T, span: Span) -> Self {
box S::from_unspanned(node, span)
}
}
impl<F> FoldWith<F> for Span {
/// no-op
fn fold_children(self, _: &mut F) -> Span {
self
}
}

11
ecmascript/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[package]
name = "swc_ecmascript"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_ecma_ast = { path = "./ast" }
swc_ecma_parser = { path = "./parser" }
[dev-dependencies]

View File

@ -0,0 +1,9 @@
[package]
name = "swc_ecma_ast"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_atoms = { path = "../../atoms" }
swc_macros = { path = "../../macros" }
swc_common = { path = "../../common" }

View File

@ -0,0 +1,38 @@
use super::{Expr, Function, PropName};
use swc_common::Span;
use swc_common::fold::FoldWith;
use swc_macros::ast_node;
#[ast_node]
pub struct Class {
pub span: Span,
pub super_class: Option<Box<Expr>>,
pub body: Vec<ClassMethod>,
}
#[ast_node]
pub struct ClassMethod {
pub key: PropName,
pub function: Function,
#[fold(ignore)]
pub kind: ClassMethodKind,
pub is_static: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ClassMethodKind {
Constructor,
Method,
Getter,
Setter,
}
impl<F> FoldWith<F> for ClassMethodKind {
fn fold_children(self, _: &mut F) -> Self {
self
}
}

View File

@ -0,0 +1,59 @@
use super::{Class, Expr, Function, Ident, Pat};
use swc_common::Span;
use swc_macros::ast_node;
#[ast_node]
pub enum Decl {
Class(ClassDecl),
Fn { ident: Ident, function: Function },
Var(VarDecl),
}
impl Decl {
pub fn span(&self) -> Span {
match *self {
Decl::Class(ClassDecl {
class: Class { span, .. },
..
})
| Decl::Fn {
function: Function { span, .. },
..
}
| Decl::Var(VarDecl { span, .. }) => span,
}
}
}
#[ast_node]
pub struct ClassDecl {
pub ident: Ident,
pub class: Class,
}
#[ast_node]
pub struct VarDecl {
pub span: Span,
pub kind: VarDeclKind,
pub decls: Vec<VarDeclarator>,
}
#[ast_node]
pub enum VarDeclKind {
Var,
Let,
Const,
}
#[ast_node]
pub struct VarDeclarator {
    pub span: Span,
    /// Binding pattern being declared.
    pub name: Pat,
    /// Initialization expression.
    pub init: Option<Box<Expr>>,
}

378
ecmascript/ast/src/expr.rs Normal file
View File

@ -0,0 +1,378 @@
use super::{BlockStmt, Class, Function, Ident, Lit, Pat, Prop};
use swc_common::{Span, Spanned};
use swc_common::fold::FoldWith;
use swc_macros::ast_node;
#[ast_node]
pub struct Expr {
pub span: Span,
pub node: ExprKind,
}
impl Spanned<ExprKind> for Expr {
fn from_unspanned(node: ExprKind, span: Span) -> Self {
Expr { span, node }
}
}
#[ast_node]
pub enum ExprKind {
This,
Array {
elems: Vec<Option<ExprOrSpread>>,
},
Object {
props: Vec<Prop>,
},
Function(FnExpr),
Unary {
op: UnaryOp,
prefix: bool,
arg: Box<Expr>,
},
/// `++v`, `--v`, `v++`, `v--`
///
Update {
op: UpdateOp,
prefix: bool,
arg: Box<Expr>,
},
Binary {
op: BinaryOp,
left: Box<Expr>,
right: Box<Expr>,
},
Assign {
op: AssignOp,
/// Pattern | Expr
left: PatOrExpr,
right: Box<Expr>,
},
//
// Logical {
//
// op: LogicalOp,
// left: Box<Expr>,
// right: Box<Expr>,
// },
/// A member expression. If computed is true, the node corresponds to a computed
/// (a[b]) member expression and property is an Expression. If computed is false, the node
/// corresponds to a static (a.b) member expression and property is an Identifier.
Member {
obj: ExprOrSuper,
prop: Box<Expr>,
computed: bool,
},
/// true ? 'a' : 'b'
Cond {
test: Box<Expr>,
cons: Box<Expr>,
alt: Box<Expr>,
},
Call {
callee: ExprOrSuper,
args: Vec<ExprOrSpread>,
},
/// `new Cat()`
New {
callee: Box<Expr>,
/// `None` for `new Cat`.
args: Option<Vec<ExprOrSpread>>,
},
Seq {
exprs: Vec<Box<Expr>>,
},
Ident(Ident),
Lit(Lit),
Tpl(TplLit),
// TODO: Use JsFn
Arrow {
params: Vec<Pat>,
body: BlockStmtOrExpr,
is_generator: bool,
is_async: bool,
},
Class(ClassExpr),
Yield {
arg: Option<Box<Expr>>,
delegate: bool,
},
MetaProp {
meta: Ident,
prop: Ident,
},
Await {
arg: Box<Expr>,
},
Paren(Box<Expr>),
}
/// Function expression.
#[ast_node]
pub struct FnExpr {
pub ident: Option<Ident>,
pub function: Function,
}
/// Class expression.
#[ast_node]
pub struct ClassExpr {
pub ident: Option<Ident>,
pub class: Class,
}
#[ast_node]
pub struct TplLit {
pub tag: Option<Box<Expr>>,
pub exprs: Vec<Box<Expr>>,
pub quasis: Vec<TplElement>,
}
#[ast_node]
pub struct TplElement {
pub tail: bool,
pub cooked: bool,
pub raw: String,
}
#[ast_node]
pub enum ExprOrSuper {
Super(Span),
Expr(Box<Expr>),
}
impl ExprOrSuper {
pub fn span(&self) -> Span {
match *self {
ExprOrSuper::Super(s) => s,
ExprOrSuper::Expr(ref e) => e.span,
}
}
}
#[ast_node]
pub enum ExprOrSpread {
Expr(Box<Expr>),
Spread(Box<Expr>),
}
#[ast_node]
pub enum BlockStmtOrExpr {
BlockStmt(BlockStmt),
Expr(Box<Expr>),
}
impl BlockStmtOrExpr {
pub fn span(&self) -> Span {
match *self {
BlockStmtOrExpr::BlockStmt(BlockStmt { span, .. }) => span,
BlockStmtOrExpr::Expr(box Expr { span, .. }) => span,
}
}
}
#[ast_node]
pub enum PatOrExpr {
Pat(Pat),
Expr(Box<Expr>),
}
#[derive(Kind, Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[kind(function(precedence = "u8"))]
pub enum BinaryOp {
/// `==`
#[kind(precedence = "6")]
EqEq,
/// `!=`
#[kind(precedence = "6")]
NotEq,
/// `==="`
#[kind(precedence = "6")]
EqEqEq,
/// `!==`
#[kind(precedence = "6")]
NotEqEq,
/// `<`
#[kind(precedence = "7")]
Lt,
/// `<=`
#[kind(precedence = "7")]
LtEq,
/// `>`
#[kind(precedence = "7")]
Gt,
/// `>=`
#[kind(precedence = "7")]
GtEq,
/// `<<`
#[kind(precedence = "8")]
LShift,
/// `>>`
#[kind(precedence = "8")]
RShift,
/// `>>>`
#[kind(precedence = "8")]
ZeroFillRShift,
/// `+`
#[kind(precedence = "9")]
Add,
/// `-`
#[kind(precedence = "9")]
Sub,
/// `*`
#[kind(precedence = "10")]
Mul,
/// `/`
#[kind(precedence = "10")]
Div,
/// `%`
#[kind(precedence = "10")]
Mod,
/// `|`
#[kind(precedence = "3")]
BitOr,
/// `^`
#[kind(precedence = "4")]
BitXor,
/// `&`
#[kind(precedence = "5")]
BitAnd,
/// `||`
#[kind(precedence = "1")]
LogicalOr,
/// `&&`
#[kind(precedence = "2")]
LogicalAnd,
/// `in`
#[kind(precedence = "7")]
In,
/// `instanceof`
#[kind(precedence = "7")]
InstanceOf,
/// `**`
#[kind(precedence = "11")]
Exp,
}
impl<F> FoldWith<F> for BinaryOp {
fn fold_children(self, _: &mut F) -> Self {
self
}
}
impl From<Ident> for Box<Expr> {
fn from(i: Ident) -> Self {
let span = i.span;
box Expr {
span,
node: ExprKind::Ident(i),
}
}
}
#[ast_node]
#[derive(Copy)]
pub enum UpdateOp {
/// `++`
PlusPlus,
/// `--`
MinusMinus,
}
#[ast_node]
#[derive(Copy)]
pub enum UnaryOp {
/// `-`
Minus,
/// `+`
Plus,
/// `!`
Bang,
/// `~`
Tilde,
/// `typeof`
TypeOf,
/// `void`
Void,
/// `delete`
Delete,
}
#[ast_node]
#[derive(Copy)]
pub enum AssignOp {
/// `=`
Assign,
/// `+=`
AddAssign,
/// `-=`
SubAssign,
/// `*=`
MulAssign,
/// `/=`
DivAssign,
/// `%=`
ModAssign,
/// `<<=`
LShiftAssign,
/// `>>=`
RShiftAssign,
/// `>>>=`
ZeroFillRShiftAssign,
/// `|=`
BitOrAssign,
/// `^=`
BitXorAssign,
/// `&=`
BitAndAssign,
/// `**=`
ExpAssign,
}

View File

@ -0,0 +1,16 @@
use super::{BlockStmt, Pat};
use swc_common::Span;
use swc_macros::ast_node;
/// Common parts of function and method.
#[ast_node]
pub struct Function {
pub params: Vec<Pat>,
pub span: Span,
pub body: BlockStmt,
pub is_generator: bool,
pub is_async: bool,
}

67
ecmascript/ast/src/lib.rs Normal file
View File

@ -0,0 +1,67 @@
#![feature(box_syntax)]
#![feature(box_patterns)]
#![feature(specialization)]
#![feature(never_type)]
#![feature(proc_macro)]
#![deny(unreachable_patterns)]
extern crate swc_atoms;
extern crate swc_common;
#[macro_use]
extern crate swc_macros;
pub use self::class::*;
pub use self::decl::*;
pub use self::expr::*;
pub use self::function::*;
pub use self::lit::*;
pub use self::module::*;
pub use self::module_decl::*;
pub use self::pat::*;
pub use self::prop::*;
pub use self::stmt::*;
use std::fmt::{self, Debug, Display, Formatter};
use swc_atoms::JsWord;
use swc_common::{Span, Spanned};
use swc_macros::AstNode;
mod class;
mod decl;
mod expr;
mod function;
mod lit;
mod module;
mod module_decl;
mod pat;
mod prop;
mod stmt;
/// Ident with span.
#[derive(AstNode, Clone, PartialEq)]
pub struct Ident {
pub span: Span,
#[fold(ignore)]
pub sym: JsWord,
}
impl Debug for Ident {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_tuple("Ident")
.field(&DebugUsingDisplay(&self.sym))
.field(&self.span)
.finish()
}
}
struct DebugUsingDisplay<T: Display>(T);
impl<T: Display> Debug for DebugUsingDisplay<T> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
Display::fmt(&self.0, f)
}
}
impl Spanned<JsWord> for Ident {
fn from_unspanned(sym: JsWord, span: Span) -> Self {
Ident { span, sym }
}
}

36
ecmascript/ast/src/lit.rs Normal file
View File

@ -0,0 +1,36 @@
use std::fmt::{self, Display, Formatter};
use swc_macros::ast_node;
#[ast_node]
pub enum Lit {
Str(String),
Bool(bool),
Null,
Num(Number),
Regex(Regex),
}
#[ast_node]
pub struct Regex {
pub exp: String,
#[fold = "regex_flags"]
pub flags: RegexFlags,
}
pub type RegexFlags = ::swc_atoms::JsWord;
#[ast_node]
pub struct Number(pub f64);
impl Display for Number {
    /// Renders infinities as `Infinity` / `-Infinity`; all other values
    /// (including NaN) use `f64`'s default `Display`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let value = self.0;
        if !value.is_infinite() {
            return Display::fmt(&value, f);
        }
        let text = if value.is_sign_positive() {
            "Infinity"
        } else {
            "-Infinity"
        };
        Display::fmt(text, f)
    }
}

View File

@ -0,0 +1,26 @@
use super::{ModuleDecl, Stmt, StmtKind};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
#[ast_node]
pub struct Module {
pub body: Vec<ModuleItem>,
}
#[ast_node]
pub enum ModuleItem {
Stmt(Stmt),
ModuleDecl(ModuleDecl),
}
impl Spanned<StmtKind> for ModuleItem {
fn from_unspanned(node: StmtKind, span: Span) -> Self {
ModuleItem::Stmt(Stmt::from_unspanned(node, span))
}
}
impl From<Stmt> for ModuleItem {
fn from(stmt: Stmt) -> Self {
ModuleItem::Stmt(stmt)
}
}

View File

@ -0,0 +1,84 @@
use super::{Class, Decl, Expr, Function, Ident, VarDecl};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
#[ast_node]
pub struct ModuleDecl {
pub span: Span,
pub node: ModuleDeclKind,
}
#[ast_node]
pub enum ModuleDeclKind {
Import {
specifiers: Vec<ImportSpecifier>,
src: String,
},
ExportDecl(Decl),
/// `export { foo } from 'mod'`
/// `export { foo as bar } from 'mod'`
ExportNamed {
specifiers: Vec<ExportSpecifier>,
src: Option<String>,
},
ExportDefaultDecl(ExportDefaultDecl),
ExportDefaultExpr(Box<Expr>),
/// `export * from 'mod'`
ExportAll {
src: String,
},
}
#[ast_node]
pub enum ExportDefaultDecl {
Class {
ident: Option<Ident>,
class: Class,
},
Fn {
ident: Option<Ident>,
function: Function,
},
Var(VarDecl),
}
#[ast_node]
pub struct ImportSpecifier {
pub span: Span,
pub local: Ident,
pub node: ImportSpecifierKind,
}
#[ast_node]
pub enum ImportSpecifierKind {
/// e.g. local = foo, imported = None `import { foo } from 'mod.js'`
/// e.g. local = bar, imported = Some(foo) for `import { foo as bar } from 'mod.js'`
Specific { imported: Option<Ident> },
/// e.g. `import foo from 'mod.js'`
Default,
/// e.g. `import * as foo from 'mod.js'`.
Namespace,
}
#[ast_node]
pub struct ExportSpecifier {
/// `foo` in `export { foo as bar }`
pub orig: Ident,
/// `Some(bar)` in `export { foo as bar }`
pub exported: Option<Ident>,
}
impl Spanned<ModuleDeclKind> for ModuleDecl {
fn from_unspanned(node: ModuleDeclKind, span: Span) -> Self {
ModuleDecl { span, node }
}
}

57
ecmascript/ast/src/pat.rs Normal file
View File

@ -0,0 +1,57 @@
use super::{Expr, Ident, PropName};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
#[ast_node]
pub struct Pat {
pub span: Span,
pub node: PatKind,
}
impl Spanned<PatKind> for Pat {
fn from_unspanned(node: PatKind, span: Span) -> Self {
Pat { span, node }
}
}
#[ast_node]
pub enum PatKind {
    Ident(Ident),
    Array(Vec<Option<Pat>>),
    /// Rest element, e.g. `...rest`.
    Rest(Box<Pat>),
    Object {
        props: Vec<ObjectPatProp>,
    },
    /// `left = right`, e.g. a parameter with a default value.
    Assign {
        left: Box<Pat>,
        right: Box<Expr>,
    },
    /// Only for for-in / for-of loops. This is *syntactically* valid.
    Expr(Box<Expr>),
}
#[ast_node]
pub enum ObjectPatProp {
/// `{key: value}`
KeyValue { key: PropName, value: Box<Pat> },
/// `{key}` or `{key = value}`
Assign {
key: Ident,
value: Option<Box<Expr>>,
},
}
impl From<Ident> for Pat {
fn from(id: Ident) -> Self {
Pat {
span: id.span,
node: PatKind::Ident(id),
}
}
}

View File

@ -0,0 +1,73 @@
use super::{BlockStmt, Expr, Function, Ident, Number, Pat};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
#[ast_node]
pub struct Prop {
pub span: Span,
pub node: PropKind,
}
impl Spanned<PropKind> for Prop {
fn from_unspanned(node: PropKind, span: Span) -> Self {
Prop { span, node }
}
}
#[ast_node]
pub enum PropKind {
/// `a` in `{ a, }`
Shorthand(Ident),
/// `key: value` in `{ key: value, }`
KeyValue {
key: PropName,
value: Box<Expr>,
},
/// This is **invalid** for object literal.
Assign {
key: Ident,
value: Box<Expr>,
},
Getter {
key: PropName,
body: BlockStmt,
},
Setter {
key: PropName,
param: Pat,
body: BlockStmt,
},
Method {
key: PropName,
function: Function,
},
}
#[ast_node]
pub enum PropName {
Ident(Ident),
/// String literal.
Str(String),
/// Numeric literal.
Num(Number),
Computed(Box<Expr>),
}
impl From<Ident> for PropName {
fn from(i: Ident) -> Self {
PropName::Ident(i)
}
}
impl Prop {
pub fn new_shorthand(ident: Ident) -> Self {
let span = ident.span;
Prop {
span,
node: PropKind::Shorthand(ident),
}
}
}

163
ecmascript/ast/src/stmt.rs Normal file
View File

@ -0,0 +1,163 @@
use super::{Decl, Expr, Ident, Pat, VarDecl};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
#[ast_node]
pub struct Stmt {
pub span: Span,
pub node: StmtKind,
}
impl From<Decl> for Stmt {
fn from(decl: Decl) -> Self {
Stmt {
span: decl.span(),
node: StmtKind::Decl(decl),
}
}
}
impl Spanned<StmtKind> for Stmt {
fn from_unspanned(node: StmtKind, span: Span) -> Self {
Stmt { span, node }
}
}
/// Use when only block statements are allowed.
#[ast_node]
pub struct BlockStmt {
/// Span of brace.
pub span: Span,
pub stmts: Vec<Stmt>,
}
impl Spanned<Vec<Stmt>> for BlockStmt {
fn from_unspanned(stmts: Vec<Stmt>, span: Span) -> Self {
BlockStmt { span, stmts }
}
}
#[ast_node]
pub enum StmtKind {
Expr(Box<Expr>),
Block(BlockStmt),
Empty,
Debugger,
With {
obj: Box<Expr>,
body: Box<Stmt>,
},
Return {
arg: Option<Box<Expr>>,
},
Labeled {
label: Ident,
body: Box<Stmt>,
},
Break {
label: Option<Ident>,
},
Continue {
label: Option<Ident>,
},
If {
test: Box<Expr>,
consequent: Box<Stmt>,
alt: Option<Box<Stmt>>,
},
Switch {
discriminant: Box<Expr>,
cases: Vec<SwitchCase>,
},
Throw {
arg: Box<Expr>,
},
/// A try statement. If handler is null then finalizer must be a BlockStmt.
Try {
block: BlockStmt,
handler: Option<CatchClause>,
finalizer: Option<BlockStmt>,
},
While {
test: Box<Expr>,
body: Box<Stmt>,
},
DoWhile {
test: Box<Expr>,
body: Box<Stmt>,
},
For {
/// VarDecl | Expr | null
init: Option<VarDeclOrExpr>,
test: Option<Box<Expr>>,
update: Option<Box<Expr>>,
body: Box<Stmt>,
},
ForIn {
left: VarDeclOrPat,
right: Box<Expr>,
body: Box<Stmt>,
},
ForOf {
left: VarDeclOrPat,
right: Box<Expr>,
body: Box<Stmt>,
},
Decl(Decl),
}
#[ast_node]
pub struct SwitchCase {
// pub span: Span,
/// None for `default:`
pub test: Option<Box<Expr>>,
pub consequent: Vec<Stmt>,
}
#[ast_node]
pub struct CatchClause {
pub param: Pat,
pub body: BlockStmt,
}
#[ast_node]
pub enum VarDeclOrPat {
VarDecl(VarDecl),
Pat(Pat),
}
#[ast_node]
pub enum VarDeclOrExpr {
VarDecl(VarDecl),
Expr(Box<Expr>),
}

View File

@ -0,0 +1,19 @@
[package]
name = "swc_ecma_parser"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_atoms = { path = "../../atoms" }
swc_macros = { path = "../../macros" }
swc_common = { path = "../../common" }
swc_ecma_ast = { path = "../ast" }
parser_macros = { path = "../parser_macros" }
unicode-xid = "0.1"
failure = "0.1"
slog = "2.1"
either = { version = "1.4" }
[dev-dependencies]
testing = { path = "../../testing" }

View File

@ -0,0 +1,16 @@
#!/bin/sh
set -eu
reset
export DBG_DUMP=1
cargo fmt >&2
cargo doc -p swc_ecma_ast -p swc_macros_common >&2
cargo check >&2
cargo doc >&2
export RUST_TEST_THREADS=1
export RUST_LOG="swc_ecmascript::parser=debug"
# cargo test --lib parser
cargo test --test test262

View File

@ -0,0 +1,38 @@
use swc_atoms::JsWord;
use swc_common::Span;
use token::Token;
/// Syntax errors reported by the parser.
#[derive(Debug)]
pub enum SyntaxError {
    /// "implements", "interface", "let", "package",\
    /// "private", "protected", "public", "static", or "yield"
    InvalidIdentInStrict,
    /// 'eval' and 'arguments' are invalid identifier in strict mode.
    EvalAndArgumentsInStrict,
    UnaryInExp,
    LineBreakInThrow,
    /// A specific token was required but something else was found.
    Expected(&'static Token),
    /// "await* has been removed from the async functions proposal. Use
    /// Promise.all() instead."
    AwaitStar,
    /// "cannot use a reserved word as a shorthand property"
    ReservedWordInObjShorthandOrPat,
    MultipleDefault,
    CommaAfterRestElement,
    NonLastRestParam,
    SpreadInParenExpr,
    /// `()`
    EmptyParenExpr,
    ExpectedIdent,
    // NOTE(review): variant name is misspelled ("Expcted"); renaming would
    // break pattern matches elsewhere, so it is kept as-is.
    ExpctedSemi,
    DuplicateLabel(JsWord),
    AsyncGenerator,
    NonTopLevelImportExport,
    /// Destructuring bindings require initializers.
    PatVarWithoutInit {
        span: Span,
    },
}

View File

@ -0,0 +1,123 @@
use std::fmt::Debug;
use std::str;
use swc_common::BytePos;
/// Used inside lexer.
///
/// Wraps a raw [`Input`] with one-`char` lookahead state: `cur` caches the
/// current `(position, char)` and `last_pos` remembers the position of the
/// last character consumed by `bump`.
pub(super) struct LexerInput<I: Input> {
    // Current `(position, char)`, lazily filled by `current()`.
    cur: Option<(BytePos, char)>,
    // Position of the most recently consumed character.
    last_pos: BytePos,
    input: I,
}

impl<I: Input> LexerInput<I> {
    pub const fn new(input: I) -> Self {
        LexerInput {
            input,
            last_pos: BytePos(0),
            cur: None,
        }
    }

    /// Consumes the current char and advances to the next one.
    ///
    /// Panics (via `unreachable!`) if called at end of input.
    pub fn bump(&mut self) {
        let pos = self.cur
            .take()
            .unwrap_or_else(|| unreachable!("bump called on eof"))
            .0;
        self.cur = self.input.next();
        self.last_pos = pos;
    }

    /// Char at `cur + 1`, without consuming anything.
    pub fn peek(&mut self) -> Option<char> {
        self.input.peek().map(|(_, c)| c)
    }

    /// Get char at `cur + 2`.
    pub fn peek_ahead(&mut self) -> Option<char> {
        self.input.peek_ahead().map(|(_, c)| c)
    }

    /// Current char; pulls the first one from the underlying input on the
    /// initial call.
    pub fn current(&mut self) -> Option<char> {
        match self.cur {
            Some((_, c)) => Some(c),
            None => {
                // NOTE(review): after EOF this re-polls the underlying
                // iterator on every call — confirm `Input` impls are fused.
                let next = self.input.next();
                self.cur = next;
                self.cur.map(|(_, c)| c)
            }
        }
    }

    /// Position of the current char, or `last_pos` at end of input.
    pub fn cur_pos(&mut self) -> BytePos {
        self.current();
        self.cur.map(|(p, _)| p).unwrap_or(self.last_pos)
    }

    /// Position of the last consumed char.
    pub fn last_pos(&self) -> BytePos {
        self.last_pos
    }
}
/// [`Input`] over a string's `CharIndices`; positions are byte offsets
/// from the start of the string.
#[derive(Debug, Clone)]
pub struct CharIndices<'a>(pub str::CharIndices<'a>);

impl<'a> Input for CharIndices<'a> {
    type Error = ();

    // Lookahead clones the underlying iterator and steps the clone, so the
    // real cursor is never advanced.
    fn peek(&mut self) -> Option<(BytePos, char)> {
        self.clone().nth(0)
    }

    fn peek_ahead(&mut self) -> Option<(BytePos, char)> {
        self.clone().nth(1)
    }

    fn uncons_while<F>(&mut self, f: F) -> Option<&str>
    where
        F: FnMut(char) -> bool,
    {
        //TODO?
        // Always `None`: this impl declines the bulk fast path, so callers
        // fall back to char-by-char consumption.
        None
    }
}

impl<'a> Iterator for CharIndices<'a> {
    type Item = (BytePos, char);

    fn next(&mut self) -> Option<Self::Item> {
        // `str::CharIndices` yields `usize` byte offsets; wrap as `BytePos`.
        self.0.next().map(|(i, c)| (BytePos(i as _), c))
    }
}
/// Character source consumed by the lexer; items are `(position, char)`.
pub trait Input: Iterator<Item = (BytePos, char)> {
    type Error: Debug;

    /// Next item, without consuming it.
    fn peek(&mut self) -> Option<(BytePos, char)>;

    /// Item one past `peek`, without consuming anything.
    fn peek_ahead(&mut self) -> Option<(BytePos, char)>;

    ///Takes items from stream, testing each one with predicate. returns the
    /// range of items which passed predicate.
    ///
    /// Implementations may return `None` to decline this bulk fast path.
    fn uncons_while<F>(&mut self, f: F) -> Option<&str>
    where
        F: FnMut(char) -> bool;
}
/// Forwarding impl so the lexer can work with a mutable borrow of an input.
impl<'a, I> Input for &'a mut I
where
    I: Input,
{
    type Error = I::Error;

    fn peek(&mut self) -> Option<(BytePos, char)> {
        <I as Input>::peek(*self)
    }

    fn peek_ahead(&mut self) -> Option<(BytePos, char)> {
        <I as Input>::peek_ahead(*self)
    }

    fn uncons_while<F>(&mut self, f: F) -> Option<&str>
    where
        F: FnMut(char) -> bool,
    {
        <I as Input>::uncons_while(self, f)
    }
}

View File

@ -0,0 +1,49 @@
/// Current char of the lexer's input, if any.
macro_rules! cur {
    ($l:expr) => {{
        $l.input.current()
    }};
}

/// Consumes the current char and advances the input.
macro_rules! bump {
    ($l:expr) => {{
        $l.input.bump()
    }};
}

/// Char after the current one, without consuming.
macro_rules! peek {
    ($l:expr) => {{
        $l.input.peek()
    }};
}

/// Char two past the current one, without consuming.
macro_rules! peek_ahead {
    ($l:expr) => {{
        $l.input.peek_ahead()
    }};
}

/// Position of the current char.
macro_rules! cur_pos {
    ($l:expr) => {{
        $l.input.cur_pos()
    }};
}

/// Position of the last consumed char.
macro_rules! last_pos {
    ($l:expr) => {{
        $l.input.last_pos()
    }};
}

/// True if the current char equals `$t`; does not consume.
macro_rules! is {
    ($l:expr, $t:tt) => {{
        cur!($l) == Some($t)
    }};
}

/// If the current char equals `$t`, consumes it and returns `true`;
/// otherwise leaves the input untouched and returns `false`.
macro_rules! eat {
    ($l:expr, $t:tt) => {{
        if is!($l, $t) {
            bump!($l);
            true
        } else {
            false
        }
    }};
}

View File

@ -0,0 +1,724 @@
//! ECMAScript lexer.
//!
//! In future, this might use string directly.
#![allow(unused_mut)]
#![allow(unused_variables)]
pub use self::input::Input;
use self::input::LexerInput;
use self::state::State;
use self::util::*;
use parser_macros::parser;
use slog::Logger;
use std::char;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use token::*;
#[macro_use]
mod macros;
pub mod input;
mod number;
mod state;
#[cfg(test)]
mod tests;
pub mod util;
/// Lexer errors.
///
/// Each variant carries the position (or span) at which the problem was
/// detected; the `#[fail(display = ...)]` attribute supplies the message.
#[derive(Fail, Debug, PartialEq, Eq, Hash)]
pub enum Error<InputError> {
    #[fail(display = "input error: {}", err)]
    Input { err: InputError },
    #[fail(display = "unterminated string constant: {}", start)]
    UnterminatedStrLit { start: BytePos },
    #[fail(display = "expected unicode escape sequence: {}", pos)]
    ExpectedUnicodeEscape { pos: BytePos },
    #[fail(display = "unexpected escape sequence in reserved word: {:?}", word)]
    EscapeInReservedWord { word: Word },
    // NOTE(review): variant name is misspelled ("Regxp"); renaming would
    // break callers, so only the message text is corrected here.
    #[fail(display = "unterminated regexp (regexp started at {})", start)]
    UnterminatedRegxp { start: BytePos },
    #[fail(display = "identifier directly after number at {}", pos)]
    IdentAfterNum { pos: BytePos },
    #[fail(display = "Decimals with leading zeros (at {}) are not allowed in strict mode", start)]
    DecimalStartsWithZero { start: BytePos },
    #[fail(display = "Octals with leading zeros (at {}) are not allowed in strict mode", start)]
    ImplicitOctalOnStrict { start: BytePos },
    #[fail(display = "Unexpected character '{}' at {}", c, pos)]
    UnexpectedChar { pos: BytePos, c: char },
    #[fail(display = "Invalid string escape at {}", start)]
    InvalidStrEscape { start: BytePos },
    // "unciode" -> "unicode" in the two messages below.
    #[fail(display = "Invalid unicode escape at {:?}", pos)]
    InvalidUnicodeEscape { pos: Span },
    #[fail(display = "Invalid unicode code point at {:?}", pos)]
    InvalidCodePoint { pos: Span },
    #[fail(display = "Invalid identifier character at {:?}", pos)]
    InvalidIdentChar { pos: Span },
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Options {
/// Support function bind expression.
pub fn_bind: bool,
pub strict: bool,
/// Support numeric separator.
pub num_sep: bool,
pub module: bool,
}
pub struct Lexer<I: Input> {
logger: Logger,
input: LexerInput<I>,
opts: Options,
state: State,
}
impl<I: Input> Lexer<I> {
pub fn new(logger: Logger, input: I) -> Self {
Self::new_with(logger, Options::default(), input)
}
pub fn new_with(logger: Logger, opts: Options, input: I) -> Self {
Lexer {
logger,
opts,
state: State::new(),
input: LexerInput::new(input),
}
}
/// Reads the next token starting at the current input position.
///
/// Returns `Ok(None)` at end of input. Single-character tokens are handled
/// inline; identifiers, numbers, strings, regexps and `<`/`>`/`/` are
/// dispatched to the dedicated `read_*` helpers.
fn read_token(&mut self) -> Result<Option<Token>, Error<I::Error>> {
    let c = match self.input.current() {
        Some(c) => c,
        None => return Ok(None),
    };
    let start = cur_pos!(self);
    let token = match c {
        // Identifier or keyword. '\uXXXX' sequences are allowed in
        // identifiers, so '\' also dispatches to that.
        c if c == '\\' || c.is_ident_start() => return self.read_ident_or_keyword().map(Some),
        //
        '.' => {
            // Check for eof
            let next = match self.input.peek() {
                Some(next) => next,
                None => {
                    self.input.bump();
                    return Ok(Some(tok!('.')));
                }
            };
            // `.5`-style float literal.
            if '0' <= next && next <= '9' {
                return self.read_number(true).map(Token::Num).map(Some);
            }
            self.input.bump(); // 1st `.`
            if next == '.' && self.input.peek() == Some('.') {
                self.input.bump(); // 2nd `.`
                self.input.bump(); // 3rd `.`
                return Ok(Some(tok!("...")));
            }
            return Ok(Some(tok!('.')));
        }
        '(' | ')' | ';' | ',' | '[' | ']' | '{' | '}' | '@' | '?' => {
            // These tokens are emitted directly.
            self.input.bump();
            return Ok(Some(match c {
                '(' => LParen,
                ')' => RParen,
                ';' => Semi,
                ',' => Comma,
                '[' => LBracket,
                ']' => RBracket,
                '{' => LBrace,
                '}' => RBrace,
                '@' => At,
                '?' => QuestionMark,
                _ => unreachable!(),
            }));
        }
        '`' => {
            bump!(self);
            return Ok(Some(tok!('`')));
        }
        ':' => {
            self.input.bump();
            // `::` only exists when the function-bind extension is enabled.
            if self.opts.fn_bind && self.input.current() == Some(':') {
                self.input.bump();
                return Ok(Some(tok!("::")));
            }
            return Ok(Some(tok!(':')));
        }
        '0' => {
            // `0x`, `0o`, `0b` prefixes select a radix; anything else is a
            // decimal (or legacy octal) number starting with zero.
            let next = self.input.peek();
            let radix = match next {
                Some('x') | Some('X') => 16,
                Some('o') | Some('O') => 8,
                Some('b') | Some('B') => 2,
                _ => return self.read_number(false).map(Num).map(Some),
            };
            return self.read_radix_number(radix).map(Num).map(Some);
        }
        '1'...'9' => return self.read_number(false).map(Num).map(Some),
        '"' | '\'' => return self.read_str_lit().map(Some),
        '/' => return self.read_slash(),
        c @ '%' | c @ '*' => {
            let is_mul = c == '*';
            self.input.bump();
            let mut token = if is_mul { BinOp(Mul) } else { BinOp(Mod) };
            // check for **
            if is_mul {
                if self.input.current() == Some('*') {
                    self.input.bump();
                    token = BinOp(Exp)
                }
            }
            // a trailing '=' upgrades to '%=', '*=', '**='.
            if self.input.current() == Some('=') {
                self.input.bump();
                token = match token {
                    BinOp(Mul) => AssignOp(MulAssign),
                    BinOp(Mod) => AssignOp(ModAssign),
                    BinOp(Exp) => AssignOp(ExpAssign),
                    _ => unreachable!(),
                }
            }
            token
        }
        // Logical operators
        c @ '|' | c @ '&' => {
            self.input.bump();
            let token = if c == '&' { BitAnd } else { BitOr };
            // '|=', '&='
            if self.input.current() == Some('=') {
                self.input.bump();
                return Ok(Some(AssignOp(match token {
                    BitAnd => BitAndAssign,
                    BitOr => BitOrAssign,
                    _ => unreachable!(),
                })));
            }
            // '||', '&&'
            if self.input.current() == Some(c) {
                self.input.bump();
                return Ok(Some(BinOp(match token {
                    BitAnd => LogicalAnd,
                    BitOr => LogicalOr,
                    _ => unreachable!(),
                })));
            }
            BinOp(token)
        }
        '^' => {
            // Bitwise xor
            self.input.bump();
            if self.input.current() == Some('=') {
                self.input.bump();
                AssignOp(BitXorAssign)
            } else {
                BinOp(BitXor)
            }
        }
        '+' | '-' => {
            self.input.bump();
            // '++', '--'
            if self.input.current() == Some(c) {
                self.input.bump();
                // Handle -->
                // (HTML-close-comment: only treated as a comment when it
                // follows a line break, per the `had_line_break` check.)
                if self.state.had_line_break && c == '-' && is!(self, '>') {
                    self.skip_line_comment(1);
                    self.skip_space();
                    return self.read_token();
                }
                if c == '+' {
                    PlusPlus
                } else {
                    MinusMinus
                }
            } else if self.input.current() == Some('=') {
                self.input.bump();
                AssignOp(if c == '+' { AddAssign } else { SubAssign })
            } else {
                BinOp(if c == '+' { Add } else { Sub })
            }
        }
        '<' | '>' => return self.read_token_lt_gt(),
        '!' | '=' => {
            self.input.bump();
            if self.input.current() == Some('=') {
                // "=="
                self.input.bump();
                if self.input.current() == Some('=') {
                    self.input.bump();
                    if c == '!' {
                        BinOp(NotEqEq)
                    } else {
                        BinOp(EqEqEq)
                    }
                } else {
                    if c == '!' {
                        BinOp(NotEq)
                    } else {
                        BinOp(EqEq)
                    }
                }
            } else if c == '=' && self.input.current() == Some('>') {
                // "=>"
                self.input.bump();
                Arrow
            } else {
                if c == '!' {
                    Bang
                } else {
                    AssignOp(Assign)
                }
            }
        }
        '~' => {
            self.input.bump();
            tok!('~')
        }
        // unexpected character
        c => return Err(Error::UnexpectedChar { c, pos: start }),
    };
    Ok(Some(token))
}
/// Read an escaped character for a string or template literal.
///
/// Expects the current char to be `\`. Returns `Ok(None)` for a line
/// continuation (`\` followed by a line terminator), which contributes no
/// character to the literal.
///
// NOTE(review): `in_template` is currently unused — presumably meant to
// reject octal escapes inside templates; confirm intended semantics.
fn read_escaped_char(&mut self, in_template: bool) -> Result<Option<char>, Error<I::Error>> {
    assert_eq!(cur!(self), Some('\\'));
    let start = cur_pos!(self);
    bump!(self); // '\'
    let c = match cur!(self) {
        Some(c) => c,
        None => return Err(Error::InvalidStrEscape { start }),
    };
    let c = match c {
        'n' => '\n',
        'r' => '\r',
        't' => '\t',
        'b' => '\u{0008}',
        'v' => '\u{000b}',
        'f' => '\u{000c}',
        '\r' => {
            bump!(self); // remove '\r'
            // `\` + CRLF counts as a single line continuation.
            if cur!(self) == Some('\n') {
                bump!(self);
            }
            return Ok(None);
        }
        '\n' | '\u{2028}' | '\u{2029}' => {
            bump!(self);
            return Ok(None);
        }
        // read hexadecimal escape sequences
        'x' => {
            bump!(self); // 'x'
            return self.read_hex_char(2).map(Some);
        }
        // read unicode escape sequences
        'u' => {
            return self.read_unicode_escape(start).map(Some);
        }
        // octal escape sequences
        '0'...'7' => {
            bump!(self);
            let first_c = if c == '0' {
                match cur!(self) {
                    Some(next) if next.is_digit(8) => c,
                    // a lone `\0` is NUL.
                    _ => return Ok(Some('\u{0000}')),
                }
            } else {
                c
            };
            // Accumulate up to two more octal digits; `one!(true)` uses
            // checked arithmetic so the value never exceeds u8 (max `\377`).
            let mut value: u8 = first_c.to_digit(8).unwrap() as u8;
            macro_rules! one {
                ($check:expr) => {{
                    match cur!(self).and_then(|c| c.to_digit(8)) {
                        Some(v) => {
                            value = if $check {
                                let new_val = value
                                    .checked_mul(8)
                                    .and_then(|value| value.checked_add(v as u8));
                                match new_val {
                                    Some(val) => val,
                                    None => return Ok(Some(value as char)),
                                }
                            } else {
                                value * 8 + v as u8
                            };
                            bump!(self);
                        }
                        _ => {
                            return Ok(Some(value as char))
                        },
                    }
                }};
            }
            one!(false);
            one!(true);
            return Ok(Some(value as char));
        }
        _ => c,
    };
    self.input.bump();
    Ok(Some(c))
}
}
#[parser]
impl<I: Input> Lexer<I> {
/// Reads a token starting with `/`: a regex literal when the state allows
/// an expression here, otherwise `/` or `/=`.
fn read_slash(&mut self) -> Result<Option<Token>, Error<I::Error>> {
    debug_assert_eq!(cur!(), Some('/'));
    // NOTE(review): `start` is unused — presumably intended for an error
    // span; confirm before removing.
    let start = cur_pos!();
    // Regex
    if self.state.is_expr_allowed {
        return self.read_regexp().map(Some);
    }
    // Divide operator
    bump!();
    Ok(Some(if cur!() == Some('=') {
        bump!();
        tok!("/=")
    } else {
        tok!('/')
    }))
}
/// Reads a token starting with `<` or `>`: comparison, shift, or their
/// assignment forms, plus the HTML-open-comment `<!--` in script mode.
fn read_token_lt_gt(&mut self) -> Result<Option<Token>, Error<I::Error>> {
    assert!(cur!() == Some('<') || cur!() == Some('>'));
    let c = cur!().unwrap();
    bump!();
    // XML style comment. `<!--`
    // (skipped only when not lexing a module; see `Options::module`)
    if !self.opts.module && c == '<' && is!('!') && peek!() == Some('-')
        && peek_ahead!() == Some('-')
    {
        self.skip_line_comment(3);
        self.skip_space();
        return self.read_token();
    }
    let mut op = if c == '<' { Lt } else { Gt };
    // '<<', '>>'
    if cur!() == Some(c) {
        bump!();
        op = if c == '<' { LShift } else { RShift };
        //'>>>'
        if c == '>' && cur!() == Some(c) {
            bump!();
            op = ZeroFillRShift;
        }
    }
    // A trailing '=' upgrades to `<=`/`>=` or a shift-assignment.
    let token = if eat!('=') {
        match op {
            Lt => BinOp(LtEq),
            Gt => BinOp(GtEq),
            LShift => AssignOp(LShiftAssign),
            RShift => AssignOp(RShiftAssign),
            ZeroFillRShift => AssignOp(ZeroFillRShiftAssign),
            _ => unreachable!(),
        }
    } else {
        BinOp(op)
    };
    Ok(Some(token))
}
/// See https://tc39.github.io/ecma262/#sec-names-and-keywords
///
/// Rejects reserved words that were written with unicode escapes
/// (`has_escape`), per the spec's early error.
fn read_ident_or_keyword(&mut self) -> Result<Token, Error<I::Error>> {
    assert!(cur!().is_some());
    let (word, has_escape) = self.read_word_as_str()?;
    let word = Word::from(word);
    if has_escape && word.is_reserved_word(self.opts.strict) {
        return Err(Error::EscapeInReservedWord { word });
    }
    Ok(Word(word))
}
/// Reads a word only if the current char can start an identifier;
/// otherwise returns `Ok(None)` without consuming input.
fn may_read_word_as_str(&mut self) -> Result<Option<(JsWord, bool)>, Error<I::Error>> {
    match cur!() {
        Some(c) if c.is_ident_start() => self.read_word_as_str().map(Some),
        _ => Ok(None),
    }
}
/// returns (word, has_escape)
///
/// `has_escape` is true when the word contained a `\uXXXX` / `\u{...}`
/// escape; `read_ident_or_keyword` uses it to reject escaped reserved
/// words.
fn read_word_as_str(&mut self) -> Result<(JsWord, bool), Error<I::Error>> {
    assert!(cur!().is_some());
    let mut has_escape = false;
    let mut word = String::new();
    let mut first = true;
    while let Some(c) = cur!() {
        let start = cur_pos!();
        // TODO: optimize (cow / chunk)
        match c {
            c if c.is_ident_part() => {
                bump!();
                word.push(c);
            }
            // unicode escape
            '\\' => {
                // Bug fix: this was never set before, so callers could not
                // detect escapes (and `has_escape` was dead / unused-mut).
                has_escape = true;
                bump!();
                if !is!('u') {
                    return Err(Error::ExpectedUnicodeEscape { pos: cur_pos!() });
                }
                let c = self.read_unicode_escape(start)?;
                // An escaped char must still be a valid identifier char for
                // its position (start vs. continuation).
                let valid = if first {
                    c.is_ident_start()
                } else {
                    c.is_ident_part()
                };
                if !valid {
                    return Err(Error::InvalidIdentChar { pos: span!(start) });
                }
                word.push(c);
            }
            _ => {
                break;
            }
        }
        first = false;
    }
    Ok((word.into(), has_escape))
}
/// Reads `uXXXX` or `u{...}` after the `\` of a unicode escape.
/// `start` is the position of the backslash, used for error spans.
fn read_unicode_escape(&mut self, start: BytePos) -> Result<char, Error<I::Error>> {
    assert_eq!(cur!(), Some('u'));
    bump!();
    if eat!('{') {
        // NOTE(review): `cp_start` is unused — presumably intended for the
        // error span below; confirm before removing.
        let cp_start = cur_pos!();
        let c = self.read_code_point()?;
        if !eat!('}') {
            return Err(Error::InvalidUnicodeEscape { pos: span!(start) });
        }
        Ok(c)
    } else {
        self.read_hex_char(4)
    }
}
/// Reads `count` hex digits and converts the value to a `char`.
///
// NOTE(review): `pos` is unused, and the error paths are still
// `unimplemented!` — both need proper error reporting.
fn read_hex_char(&mut self, count: u8) -> Result<char, Error<I::Error>> {
    debug_assert!(count == 2 || count == 4);
    let pos = cur_pos!();
    match self.read_int(16, count)? {
        Some(val) => match char::from_u32(val) {
            Some(c) => Ok(c),
            None => unimplemented!("Syntax Error: not char? val = {}", val),
        },
        None => unimplemented!("Syntax Error: expected {} hex chars", count),
    }
}
/// Read `CodePoint`.
///
/// Accepts any number of hex digits but rejects values above U+10FFFF and
/// values that are not valid `char`s (surrogates).
fn read_code_point(&mut self) -> Result<char, Error<I::Error>> {
    let start = cur_pos!();
    let val = self.read_int(16, 0)?;
    match val {
        Some(val) if 0x10FFFF >= val => match char::from_u32(val) {
            Some(c) => Ok(c),
            None => return Err(Error::InvalidCodePoint { pos: span!(start) }),
        },
        _ => return Err(Error::InvalidCodePoint { pos: span!(start) }),
    }
}
/// See https://tc39.github.io/ecma262/#sec-literals-string-literals
///
/// The second field of the returned `Str` records whether the literal was
/// double-quoted (`c == '"'`).
fn read_str_lit(&mut self) -> Result<Token, Error<I::Error>> {
    assert!(cur!() == Some('\'') || cur!() == Some('"'));
    let start = cur_pos!();
    let quote = cur!().unwrap();
    bump!(); // '"'
    let mut out = String::new();
    //TODO: Optimize (Cow, Chunk)
    while let Some(c) = cur!() {
        match c {
            c if c == quote => {
                bump!();
                return Ok(Str(out, c == '"'));
            }
            // an escape may yield no char at all (line continuation).
            '\\' => out.extend(self.read_escaped_char(false)?),
            c if c.is_line_break() => return Err(Error::UnterminatedStrLit { start }),
            _ => {
                out.push(c);
                bump!();
            }
        }
    }
    Err(Error::UnterminatedStrLit { start })
}
/// Expects current char to be '/'
///
/// Reads a regex literal `/content/flags`, tracking `\` escapes and
/// character classes (`[...]`, inside which `/` does not terminate).
fn read_regexp(&mut self) -> Result<Token, Error<I::Error>> {
    assert_eq!(cur!(), Some('/'));
    let start = cur_pos!();
    bump!();
    let (mut escaped, mut in_class) = (false, false);
    // TODO: Optimize (chunk, cow)
    let mut content = String::new();
    while let Some(c) = cur!() {
        // This is ported from babel.
        // Seems like regexp literal cannot contain linebreak.
        if c.is_line_break() {
            return Err(Error::UnterminatedRegxp { start });
        }
        if escaped {
            escaped = false;
        } else {
            match c {
                '[' => in_class = true,
                ']' if in_class => in_class = false,
                // Terminates content part of regex literal
                '/' if !in_class => break,
                _ => {}
            }
            escaped = c == '\\';
        }
        bump!();
        content.push(c);
    }
    // input is terminated without following `/`
    if cur!() != Some('/') {
        return Err(Error::UnterminatedRegxp { start });
    }
    bump!(); // '/'
    // Spec says "It is a Syntax Error if IdentifierPart contains a Unicode escape
    // sequence." TODO: check for escape
    // Need to use `read_word` because '\uXXXX' sequences are allowed
    // here (don't ask).
    let flags = self.may_read_word_as_str()?
        .map(|(f, _)| f)
        .unwrap_or_else(|| "".into());
    Ok(Regex(content, flags))
}
/// Reads the raw text of a template literal up to the next `` ` `` or `${`.
fn read_tmpl_token(&mut self) -> Result<Token, Error<I::Error>> {
    let start = cur_pos!();
    // TODO: Optimize
    let mut out = String::new();
    while let Some(c) = cur!() {
        if c == '`' || (c == '$' && peek!() == Some('{')) {
            // If nothing was consumed and the previous token was already a
            // Template element, emit the delimiter token itself instead of
            // an empty Template.
            if start == cur_pos!() && self.state.last_was_tpl_element() {
                if c == '$' {
                    bump!();
                    bump!();
                    return Ok(tok!("${"));
                } else {
                    bump!();
                    return Ok(tok!('`'));
                }
            }
            // TODO: Handle error
            return Ok(Template(out));
        }
        if c == '\\' {
            let ch = self.read_escaped_char(true)?;
            out.extend(ch);
        } else if c.is_line_break() {
            self.state.had_line_break = true;
            // CRLF is collapsed to a single '\n' in the cooked value.
            let c = if c == '\r' && peek!() == Some('\n') {
                bump!(); // '\r'
                '\n'
            } else {
                c
            };
            bump!();
            out.push(c);
        } else {
            bump!();
            out.push(c);
        }
    }
    unimplemented!("error: unterminated template");
}
}
/// Lets the parser ask whether ASI-relevant line breaks preceded the last
/// token.
impl<I: Input> ::parser::Input for Lexer<I> {
    fn had_line_break_before_last(&self) -> bool {
        self.state.had_line_break
    }
}
impl<'a> Lexer<input::CharIndices<'a>> {
    /// Convenience constructor lexing directly from a string slice.
    pub fn new_from_str(logger: Logger, s: &'a str) -> Self {
        Lexer::new(logger, input::CharIndices(s.char_indices()))
    }
}

View File

@ -0,0 +1,393 @@
//! Lexer methods related to reading numbers.
//!
//!
//! See https://tc39.github.io/ecma262/#sec-literals-numeric-literals
use super::*;
use std::fmt::Display;
#[parser]
impl<I: Input> Lexer<I> {
/// Reads an integer, octal integer, or floating-point number
///
/// `starts_with_dot` means the literal begins with `.` (e.g. `.5`).
/// Handles legacy octal (`0123`), the fraction part, and exponents;
/// strict mode rejects zero-prefixed decimals and legacy octals.
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> Result<Number, Error<I::Error>> {
    assert!(cur!().is_some());
    if starts_with_dot {
        debug_assert_eq!(
            cur!(),
            Some('.'),
            "read_number(starts_with_dot = true) expects current char to be '.'"
        );
    }
    let start = cur_pos!();
    let starts_with_zero = cur!().unwrap() == '0';
    let val = if starts_with_dot {
        // first char is '.'
        0f64
    } else {
        // Use read_number_no_dot to support long numbers.
        let val = self.read_number_no_dot(10)?;
        if starts_with_zero {
            // TODO: I guess it would be okay if I don't use -ffast-math
            // (or something like that), but needs review.
            if val == 0.0f64 {
                // If only one zero is used, it's decimal.
                // And if multiple zero is used, it's octal.
                //
                // e.g. `0` is decimal (so it can be part of float)
                //
                // e.g. `000` is octal
                if start != last_pos!() {
                    return self.make_legacy_octal(start, 0f64);
                }
            } else {
                // strict mode hates non-zero decimals starting with zero.
                // e.g. 08.1 is strict mode violation but 0.1 is valid float.
                if self.opts.strict {
                    return Err(Error::DecimalStartsWithZero { start });
                }
                let s = format!("{}", val); // TODO: Remove allocation.
                // if it contains '8' or '9', it's decimal.
                if s.contains('8') || s.contains('9') {
                } else {
                    // It's Legacy octal, and we should reinterpret value.
                    let val = u64::from_str_radix(&format!("{}", val), 8)
                        .expect("Does this can really happen?");
                    let val = format!("{}", val)
                        .parse()
                        .expect("failed to parse numeric value as f64");
                    return self.make_legacy_octal(start, val);
                }
            }
        }
        val
    };
    // At this point, number cannot be an octal literal.
    let mut val: f64 = val;
    // `0.a`, `08.a`, `102.a` are invalid.
    //
    // `.1.a`, `.1e-4.a` are valid,
    if cur!() == Some('.') {
        bump!();
        if starts_with_dot {
            debug_assert!(cur!().is_some());
            debug_assert!(cur!().unwrap().is_digit(10));
        }
        // Read numbers after dot
        let minority_val = self.read_int(10, 0)?;
        let minority: &Display = match minority_val {
            Some(ref n) => n,
            // "0.", "0.e1" is valid
            None => &"",
        };
        // TODO
        val = format!("{}.{}", val, minority)
            .parse()
            .expect("failed to parse float using rust's impl");
    }
    // Handle 'e' and 'E'
    //
    // .5e1 = 5
    // 1e2 = 100
    // 1e+2 = 100
    // 1e-2 = 0.01
    if eat!('e') || eat!('E') {
        let next = match cur!() {
            Some(next) => next,
            None => unimplemented!("expected +, - or digit after e"),
        };
        let positive = if next == '+' || next == '-' {
            bump!(); // remove '+', '-'
            next == '+'
        } else {
            true
        };
        // TODO: Optimize this
        let exp = self.read_number_no_dot(10)?;
        let flag = if positive { '+' } else { '-' };
        // TODO:
        val = format!("{}e{}{}", val, flag, exp)
            .parse()
            .expect("failed to parse float literal");
    }
    // A number may not be directly followed by an identifier char.
    self.ensure_not_ident()?;
    Ok(Number(val))
}
/// Reads an integer literal with an explicit radix prefix
/// (`0x…`, `0o…`, `0b…`).
///
/// Expects the current char to be `0` and the following char to be the
/// radix letter; both are consumed before the digits are read.
pub(super) fn read_radix_number(&mut self, radix: u8) -> Result<Number, Error<I::Error>> {
    debug_assert!(
        radix == 2 || radix == 8 || radix == 16,
        "radix should be one of 2, 8, 16, but got {}",
        radix
    );
    debug_assert_eq!(cur!(), Some('0'));
    // The result of bump!() was previously bound to an unused `start`
    // local; dropped to remove the dead binding.
    bump!(); // 0
    bump!(); // x
    let val = self.read_number_no_dot(radix)?;
    // A number may not be directly followed by an identifier char.
    self.ensure_not_ident()?;
    Ok(Number(val))
}
/// This can read long integers like
/// "13612536612375123612312312312312312312312".
///
/// Digits are folded into an `f64` via `mul_add`, so very large values
/// lose precision (or become infinity) instead of overflowing.
fn read_number_no_dot(&mut self, radix: u8) -> Result<f64, Error<I::Error>> {
    debug_assert!(
        radix == 2 || radix == 8 || radix == 10 || radix == 16,
        "radix for read_number_no_dot should be one of 2, 8, 10, 16, but got {}",
        radix
    );
    self.read_digits(radix, |total, radix, v| {
        (f64::mul_add(total, radix as f64, v as f64), true)
    })
}
/// Ensure that ident cannot directly follow numbers.
fn ensure_not_ident(&mut self) -> Result<(), Error<I::Error>> {
match cur!() {
Some(c) if c.is_ident_start() => Err(Error::IdentAfterNum { pos: cur_pos!() }),
_ => Ok(()),
}
}
/// Read an integer in the given radix. Return `None` if zero digits
/// were read, the integer value otherwise.
/// When `len` is not zero, this
/// will return `None` unless the integer has exactly `len` digits.
///
// NOTE(review): when fewer than `len` digits are available the closure
// simply stops and the partial value is returned as `Some` — that does
// not match the doc above; confirm which behavior is intended.
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> Result<Option<u32>, Error<I::Error>> {
    let mut count = 0;
    self.read_digits(radix, |opt: Option<u32>, radix, val| {
        count += 1;
        let total = opt.unwrap_or_default() * radix as u32 + val as u32;
        // stop after `len` digits (when `len` != 0).
        (Some(total), count != len)
    })
}
/// `op`- |total, radix, value| -> (total * radix + value, continue)
///
/// Generic digit-reading loop shared by `read_int` (u32 accumulator) and
/// `read_number_no_dot` (f64 accumulator). Stops at the first non-digit
/// or when `op` returns `continue == false`.
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> Result<Ret, Error<I::Error>>
where
    F: FnMut(Ret, u8, u32) -> (Ret, bool),
    Ret: Copy + Default,
{
    debug_assert!(
        radix == 2 || radix == 8 || radix == 10 || radix == 16,
        "radix for read_int should be one of 2, 8, 10, 16, but got {}",
        radix
    );
    debug!(
        self.logger,
        "read_digits(radix = {}), cur = {:?}",
        radix,
        cur!(self)
    );
    // NOTE(review): `start` is unused — presumably intended for error
    // spans; confirm before removing.
    let start = cur_pos!();
    let mut total: Ret = Default::default();
    while let Some(c) = cur!() {
        if self.opts.num_sep {
            // let prev: char = unimplemented!("prev");
            // let next = self.input.peek();
            // if c == '_' {
            // if !allowed_siblings.contains(&next) {
            // unimplemented!("Error(Invalid or unexpected token)");
            // }
            // if forbidden_siblings.contains(&prev) ||
            // forbidden_siblings.contains(&next) ||
            // Number::is_nan(next) {
            // unimplemented!("Error(Invalid or unexpected token)");
            // }
            // // Ignore this _ character
            // self.input.bump();
            // }
            unimplemented!("numeric separator")
        }
        // e.g. (val for a) = 10 where radix = 16
        let val = if let Some(val) = c.to_digit(radix as _) {
            val
        } else {
            break;
        };
        bump!();
        let (t, cont) = op(total, radix, val);
        total = t;
        if !cont {
            break;
        }
    }
    Ok(total)
}
/// Finish lexing a legacy (implicit) octal literal, which is an error in
/// strict mode.
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> Result<Number, Error<I::Error>> {
    self.ensure_not_ident()?;
    if self.opts.strict {
        Err(Error::ImplicitOctalOnStrict { start })
    } else {
        // FIXME
        Ok(Number(val))
    }
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use lexer::input::CharIndices;
    use std::f64::INFINITY;
    use std::panic;
    /// Builds a lexer over `s` with a test logger.
    fn lexer(s: &'static str) -> Lexer<CharIndices<'static>> {
        let l = ::testing::logger().new(o!("src" => s));
        Lexer::new_from_str(l, s)
    }
    /// Lexes `s` as a single number and returns its value.
    fn num(s: &'static str) -> f64 {
        lexer(s)
            .read_number(s.starts_with("."))
            .expect("read_number failed")
            .0
    }
    /// Reads an unbounded integer from `s` in the given radix.
    fn int(radix: u8, s: &'static str) -> u32 {
        lexer(s)
            .read_int(radix, 0)
            .expect("read_int failed")
            .expect("read_int returned None")
    }
    const LONG: &str = "1e10000000000000000000000000000000000000000\
                        0000000000000000000000000000000000000000000000000000";
    #[test]
    fn num_inf() {
        // exponents far beyond f64 range must saturate to infinity.
        assert_eq!(num(LONG), INFINITY);
    }
    /// Number >= 2^53
    #[test]
    fn num_big_exp() {
        assert_eq!(1e30, num("1e30"));
    }
    #[test]
    #[ignore]
    fn num_big_many_zero() {
        assert_eq!(
            1000000000000000000000000000000f64,
            num("1000000000000000000000000000000")
        )
    }
    #[test]
    fn num_legacy_octal() {
        // `0012` is a legacy octal literal in sloppy mode.
        assert_eq!(0o12 as f64, num("0012"));
    }
    #[test]
    fn read_int_1() {
        assert_eq!(60, int(10, "60"));
        assert_eq!(0o73, int(8, "73"));
    }
    #[test]
    fn read_int_short() {
        assert_eq!(7, int(10, "7"));
    }
    #[test]
    fn read_radix_number() {
        assert_eq!(Ok(Number(0o73 as f64)), lexer("0o73").read_radix_number(8));
    }
    /// Valid even on strict mode.
    const VALID_CASES: &[&str] = &[".0", "0.e-1", "0e8", ".8e1", "0.8e1", "1.18e1"];
    const INVALID_CASES_ON_STRICT: &[&str] = &["08e1", "08.1", "08.8e1", "08", "01"];
    const INVALID_CASES: &[&str] = &[".e-1", "01.8e1", "012e1", "00e1", "00.0"];
    /// Lexes each case and checks it either yields exactly the expected
    /// number (`success`) or fails / yields something else.
    fn test_floats(strict: bool, success: bool, cases: &'static [&'static str]) {
        for case in cases {
            let logger = ::testing::logger().new(o!("src" => case,
                "strict" => strict,
                "expected" => if success { "success" } else { "error" }
            ));
            // lazy way to get expected value..
            let expected: f64 = (i64::from_str_radix(case, 8).map(|v| v as f64))
                .or_else(|_| case.parse::<i64>().map(|v| v as f64))
                .or_else(|_| case.parse::<f64>())
                .expect("failed to parse `expected` as float using str.parse()");
            let input = CharIndices(case.char_indices());
            // catch_unwind because lexing errors currently panic via
            // unimplemented!.
            let vec = panic::catch_unwind(|| {
                Lexer::new_with(
                    logger,
                    Options {
                        strict,
                        ..Default::default()
                    },
                    input,
                ).map(|ts| ts.token)
                    .collect::<Vec<_>>()
            });
            if success {
                let vec = match vec {
                    Ok(vec) => vec,
                    Err(err) => panic::resume_unwind(err),
                };
                assert_eq!(vec.len(), 1);
                let token = vec.into_iter().next().unwrap();
                assert_eq!(Num(Number(expected)), token);
            } else {
                match vec {
                    Ok(vec) => assert!(vec![Num(Number(expected))] != vec),
                    _ => {}
                }
            }
        }
    }
    #[test]
    fn strict_mode() {
        test_floats(true, true, VALID_CASES);
        test_floats(true, false, INVALID_CASES_ON_STRICT);
        test_floats(true, false, INVALID_CASES);
    }
    #[test]
    fn non_strict() {
        test_floats(false, true, VALID_CASES);
        test_floats(false, true, INVALID_CASES_ON_STRICT);
        test_floats(false, false, INVALID_CASES);
    }
}

View File

@ -0,0 +1,330 @@
use super::{Input, Lexer};
use parser_macros::parser;
use slog::Logger;
use swc_common::{BytePos, Span};
use token::*;
/// State of lexer.
///
/// Ported from babylon.
#[derive(Debug)]
pub(super) struct State {
    /// Whether a `/` at the current position starts a regex literal
    /// (`true`) rather than a division operator (see `read_slash`).
    pub is_expr_allowed: bool,
    pub octal_pos: Option<BytePos>,
    /// if line break exists between previous token and new token?
    pub had_line_break: bool,
    /// TODO: Remove this field.
    is_first: bool,
    /// Stack of enclosing syntactic contexts (braces, parens, templates).
    context: Context,
    // TODO: Create a new enum `TokenType` instead of cloning token.
    /// The previously emitted token.
    token_type: Option<Token>,
}
/// The lexer is driven as an iterator of `TokenAndSpan`; each `next`
/// updates `State` so the following token is lexed in the right context.
#[parser]
impl<I: Input> Iterator for Lexer<I> {
    type Item = TokenAndSpan;
    fn next(&mut self) -> Option<Self::Item> {
        // the very first token counts as preceded by a line break.
        self.state.had_line_break = self.state.is_first;
        self.state.is_first = false;
        // skip spaces before getting next character, if we are allowed to.
        if self.state.can_skip_space() {
            self.skip_space()
        };
        let start = cur_pos!();
        if self.state.is_in_template() {
            // inside `` ` `` the raw text is lexed as a Template token.
            let token = self.read_tmpl_token()
                .unwrap_or_else(|err| unimplemented!("error handling: {:?}", err));
            self.state.update(&self.logger, &token);
            return Some(TokenAndSpan {
                token,
                span: span!(start),
            });
        }
        if let Some(token) = self.read_token()
            .unwrap_or_else(|err| unimplemented!("error handling: {:?}", err))
        {
            self.state.update(&self.logger, &token);
            return Some(TokenAndSpan {
                token,
                span: span!(start),
            });
        }
        None
    }
}
impl State {
/// Initial state: expression position, one `BraceStmt` context on the
/// stack.
pub fn new() -> Self {
    State {
        is_expr_allowed: true,
        octal_pos: None,
        is_first: true,
        had_line_break: false,
        context: Context(vec![Type::BraceStmt]),
        token_type: None,
    }
}
/// True unless the current context (e.g. a template literal body)
/// requires whitespace to be preserved.
pub fn can_skip_space(&self) -> bool {
    let preserve = match self.context.current() {
        Some(t) => t.preserve_space(),
        None => false,
    };
    !preserve
}
/// Whether the lexer is currently inside a template literal body.
fn is_in_template(&self) -> bool {
    self.context.current() == Some(Type::Tpl)
}
/// Whether the previously emitted token was a `Template` element.
pub fn last_was_tpl_element(&self) -> bool {
    matches!(self.token_type, Some(Template(..)))
}
/// Records `next` as the last token and recomputes `is_expr_allowed`
/// (and the context stack) for the token that follows it.
fn update(&mut self, logger: &Logger, next: &Token) {
    trace!(
        logger,
        "updating state: next={:?}, had_line_break={} ",
        next,
        self.had_line_break
    );
    let prev = self.token_type.take();
    self.token_type = Some(next.clone());
    self.is_expr_allowed = Self::is_expr_allowed_on_next(
        logger,
        &mut self.context,
        prev,
        next,
        self.had_line_break,
        self.is_expr_allowed,
    );
}
/// `is_expr_allowed`: previous value.
///
/// Decides whether the position after `next` is an expression position
/// (so `/` starts a regex) and pushes/pops `context` accordingly.
/// Ported from babylon's `updateContext`.
fn is_expr_allowed_on_next(
    logger: &Logger,
    context: &mut Context,
    prev: Option<Token>,
    next: &Token,
    had_line_break: bool,
    is_expr_allowed: bool,
) -> bool {
    let is_next_keyword = match next {
        &Word(Keyword(..)) => true,
        _ => false,
    };
    // e.g. `a.if` — a keyword after `.` is just a property name.
    if is_next_keyword && prev == Some(Token::Dot) {
        return false;
    } else {
        // ported updateContext
        match *next {
            tok!(')') | tok!('}') => {
                // TODO: Verify
                if context.len() == 1 {
                    return true;
                }
                let out = context.pop(logger).unwrap();
                // let a = function(){}
                if out == Type::BraceStmt && context.current() == Some(Type::FnExpr) {
                    context.pop(logger);
                    return false;
                }
                // ${} in template
                if out == Type::TplQuasi {
                    return true;
                }
                // expression cannot follow expression
                return !out.is_expr();
            }
            tok!("function") => {
                // This is required to lex
                // `x = function(){}/42/i`
                if is_expr_allowed
                    && !context.is_brace_block(prev, had_line_break, is_expr_allowed)
                {
                    context.push(logger, Type::FnExpr);
                }
                return false;
            }
            // for (a of b) {}
            tok!("of") if Some(Type::ParenStmt { is_for_loop: true }) == context.current() => {
                // e.g. for (a of _) => true
                !prev.expect("context.current() if ParenStmt, so prev token cannot be None")
                    .before_expr()
            }
            Word(Ident(ref ident)) => {
                // variable declaration
                return match prev {
                    Some(prev) => match prev {
                        // handle automatic semicolon insertion.
                        Word(Keyword(Let)) | Word(Keyword(Const)) | Word(Keyword(Var))
                            if had_line_break =>
                        {
                            true
                        }
                        _ => false,
                    },
                    _ => false,
                };
            }
            tok!('{') => {
                let next_ctxt = if context.is_brace_block(prev, had_line_break, is_expr_allowed)
                {
                    Type::BraceStmt
                } else {
                    Type::BraceExpr
                };
                context.push(logger, next_ctxt);
                true
            }
            tok!("${") => {
                context.push(logger, Type::TplQuasi);
                return true;
            }
            tok!('(') => {
                // if, for, with, while is statement
                context.push(
                    logger,
                    match prev {
                        Some(Word(Keyword(k))) => match k {
                            If | With | While => Type::ParenStmt { is_for_loop: false },
                            For => Type::ParenStmt { is_for_loop: true },
                            _ => Type::ParenExpr,
                        },
                        _ => Type::ParenExpr,
                    },
                );
                return true;
            }
            // remains unchanged.
            tok!("++") | tok!("--") => is_expr_allowed,
            tok!('`') => {
                // If we are in template, ` terminates template.
                if context.current() == Some(Type::Tpl) {
                    context.pop(logger);
                } else {
                    context.push(logger, Type::Tpl);
                }
                return false;
            }
            _ => {
                return next.before_expr();
            }
        }
    }
}
}
/// Stack of syntactic contexts, used to decide whether `/` starts a regex
/// and whether `{` starts a block or an object literal.
#[derive(Debug, Default)]
struct Context(Vec<Type>);
impl Context {
/// Returns true if following `LBrace` token is `block statement` according to
/// `ctx`, `prev`, `is_expr_allowed`.
fn is_brace_block(
    &self,
    prev: Option<Token>,
    had_line_break: bool,
    is_expr_allowed: bool,
) -> bool {
    // `:` is ambiguous: object-literal value vs. labeled statement —
    // disambiguate by the enclosing context.
    match prev {
        Some(tok!(':')) => match self.current() {
            Some(Type::BraceStmt) => return true,
            // `{ a: {} }`
            // ^ ^
            Some(Type::BraceExpr) => return false,
            _ => {}
        },
        _ => {}
    }
    match prev {
        // function a() {
        // return { a: "" };
        // }
        // function a() {
        // return
        // {
        // function b(){}
        // };
        // }
        Some(tok!("return")) | Some(tok!("yield")) => {
            return had_line_break;
        }
        Some(tok!("else")) | Some(Semi) | None | Some(tok!(')')) => return true,
        // If previous token was `{`
        Some(tok!('{')) => return self.current() == Some(Type::BraceStmt),
        // `class C<T> { ... }`
        Some(tok!('<')) | Some(tok!('>')) => return true,
        _ => {}
    }
    return !is_expr_allowed;
}
/// Depth of the context stack.
fn len(&self) -> usize {
    self.0.len()
}
/// Pops the current context, tracing the transition.
fn pop(&mut self, logger: &Logger) -> Option<Type> {
    let popped = self.0.pop();
    trace!(logger, "context.pop({:?})", popped);
    popped
}
/// The innermost context, if any.
fn current(&self) -> Option<Type> {
    self.0.last().cloned()
}
/// Pushes a new context, tracing the transition.
fn push(&mut self, logger: &Logger, t: Type) {
    trace!(logger, "context.push({:?})", t);
    self.0.push(t)
}
}
/// The algorithm used to determine whether a regexp can appear at a
/// given point in the program is loosely based on sweet.js' approach.
/// See https://github.com/mozilla/sweet.js/wiki/design
///
// NOTE(review): "fucntion" below is a typo, but it must match whatever
// the `Kind` derive macro parses — fix both together, not only here.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Kind)]
#[kind(fucntion(is_expr = "bool", preserve_space = "bool"))]
enum Type {
    BraceStmt,
    #[kind(is_expr)]
    BraceExpr,
    #[kind(is_expr)]
    TplQuasi,
    ParenStmt {
        /// Is this `for` loop?
        is_for_loop: bool,
    },
    #[kind(is_expr)]
    ParenExpr,
    /// Template literal body: whitespace must be preserved.
    #[kind(is_expr, preserve_space)]
    Tpl,
    #[kind(is_expr)]
    FnExpr,
}

View File

@ -0,0 +1,498 @@
use super::*;
use super::input::CharIndices;
use std::ops::Range;
use std::str;
/// Builds a lexer over `s` with a test logger tagged with the source.
fn make_lexer(s: &'static str) -> Lexer<CharIndices<'static>> {
    let logger = ::testing::logger().new(o!("src" => s));
    Lexer::new_from_str(logger, s)
}
/// Lexes `s` completely, returning tokens with their spans.
fn lex(s: &'static str) -> Vec<TokenAndSpan> {
    println!("Source:\n{}", s);
    make_lexer(&s).collect()
}
/// Lexes `s` completely, returning only the tokens (spans dropped).
fn lex_tokens(s: &'static str) -> Vec<Token> {
    make_lexer(&s).map(|ts| ts.token).collect()
}
/// Conversion of `usize` / `Range<usize>` into a `Span` for expectations.
trait SpanRange: Sized {
    fn into_span(self) -> Span;
}
/// A single position maps to a span with identical start and end.
impl SpanRange for usize {
    fn into_span(self) -> Span {
        let pos = BytePos(self as _);
        Span {
            start: pos,
            end: pos,
        }
    }
}
/// A half-open `start..end` range maps to the inclusive span
/// `start ..= end - 1` used by the lexer.
impl SpanRange for Range<usize> {
    fn into_span(self) -> Span {
        let Range { start, end } = self;
        Span {
            start: BytePos(start as _),
            end: BytePos((end - 1) as _),
        }
    }
}
/// Helper for building expected `TokenAndSpan` values concisely in tests.
trait WithSpan: Sized {
    fn span<R>(self, span: R) -> TokenAndSpan
    where
        R: SpanRange,
    {
        TokenAndSpan {
            token: self.into_token(),
            span: span.into_span(),
        }
    }
    fn into_token(self) -> Token;
}
/// A `Token` is already a token.
impl WithSpan for Token {
    fn into_token(self) -> Token {
        self
    }
}
/// An integer literal expectation.
impl WithSpan for usize {
    fn into_token(self) -> Token {
        Num(Number(self as f64))
    }
}
/// A float literal expectation.
impl WithSpan for f64 {
    fn into_token(self) -> Token {
        Num(Number(self))
    }
}
/// A bare string expectation is an identifier token.
impl<'a> WithSpan for &'a str {
    fn into_token(self) -> Token {
        Word(Ident(self.into()))
    }
}
/// A keyword expectation.
impl WithSpan for Keyword {
    fn into_token(self) -> Token {
        Word(Keyword(self))
    }
}
/// A word expectation.
impl WithSpan for Word {
    fn into_token(self) -> Token {
        Word(self)
    }
}
/// A binary-operator expectation.
impl WithSpan for BinOpToken {
    fn into_token(self) -> Token {
        BinOp(self)
    }
}
/// An assignment-operator expectation.
impl WithSpan for AssignOpToken {
    fn into_token(self) -> Token {
        AssignOp(self)
    }
}
// `123.` is a complete NumericLiteral, so `123..a(1)` is member access on it.
#[test]
fn test262_lexer_error_0001() {
    assert_eq!(
        vec![
            123f64.span(0..4),
            Dot.span(4..5),
            "a".span(5..6),
            LParen.span(6..7),
            1.span(7..8),
            RParen.span(8..9),
        ],
        lex("123..a(1)")
    )
}
// `\x20` in a string cooks to a space; the span covers the raw source.
#[test]
fn test262_lexer_error_0002() {
    assert_eq!(
        vec![Str("use strict".into(), false).span(0..15), Semi.span(15)],
        lex(r#"'use\x20strict';"#)
    );
}
// A `\uXXXX` escape alone forms a one-char identifier.
#[test]
fn test262_lexer_error_0003() {
    assert_eq!(vec!["a".span(0..6)], lex(r#"\u0061"#));
}
// After `}` of an object literal in expression position, `/` is division.
#[test]
fn test262_lexer_error_0004() {
    assert_eq!(
        vec![tok!('+'), tok!('{'), tok!('}'), tok!('/'), 1.into_token()],
        lex_tokens("+{} / 1")
    );
}
// Plain char + unicode escape fuse into a single identifier.
#[test]
fn ident_escape_unicode() {
    assert_eq!(vec!["aa".span(0..7)], lex(r#"a\u0061"#));
}
// Multi-byte identifier chars, literal and `\uXXXX`-escaped.
#[test]
fn ident_escape_unicode_2() {
    assert_eq!(lex("℘℘"), vec!["℘℘".span(0..4)]);
    assert_eq!(lex(r#"℘\u2118"#), vec!["℘℘".span(0..9)]);
}
// `\x61` hex escape inside a string.
#[test]
fn str_escape_hex() {
    assert_eq!(lex(r#"'\x61'"#), vec![Str("a".into(), false).span(0..6)]);
}
// `\012` octal escape cooks to '\n'.
#[test]
fn str_escape_octal() {
    assert_eq!(
        lex(r#"'Hello\012World'"#),
        vec![Str("Hello\nWorld".into(), false).span(0..16)]
    )
}
// `\u{...}` escapes allow (many) leading zeros.
#[test]
fn str_escape_unicode_long() {
    assert_eq!(
        lex(r#"'\u{00000000034}'"#),
        vec![Str("4".into(), false).span(0..17)]
    );
}
// `void` and `(` both leave the lexer in expression position → regex.
#[test]
fn regexp_unary_void() {
    assert_eq!(
        lex("void /test/"),
        vec![Void.span(0..4), Regex("test".into(), "".into()).span(5..11)]
    );
    assert_eq!(
        lex("void (/test/)"),
        vec![
            Void.span(0..4),
            LParen.span(5..6),
            Regex("test".into(), "".into()).span(6..12),
            RParen.span(12..13),
        ]
    );
}
// Same as test262_lexer_error_0004, but asserting spans as well.
#[test]
fn non_regexp_unary_plus() {
    assert_eq!(
        lex("+{} / 1"),
        vec![
            tok!('+').span(0..1),
            tok!('{').span(1..2),
            tok!('}').span(2..3),
            tok!('/').span(4..5),
            1.span(6..7),
        ]
    );
}
// ----------
// The lexer itself does not enforce bracket balance.
#[test]
fn invalid_but_lexable() {
    assert_eq!(
        vec![LParen.span(0), LBrace.span(1), Semi.span(2)],
        lex("({;")
    );
}
// Basic single-character tokens with spans.
#[test]
fn paren_semi() {
    assert_eq!(
        vec![LParen.span(0), RParen.span(1), Semi.span(2)],
        lex("();")
    );
}
// Identifiers mixed with punctuation.
#[test]
fn ident_paren() {
    assert_eq!(
        vec![
            "a".span(0),
            LParen.span(1),
            "bc".span(2..4),
            RParen.span(4),
            Semi.span(5),
        ],
        lex("a(bc);")
    );
}
// Whitespace-separated identifiers.
#[test]
fn read_word() {
    assert_eq!(vec!["a".span(0), "b".span(2), "c".span(4)], lex("a b c"),)
}
// After `=` (and at start of input) `/` begins a regex literal.
#[test]
fn simple_regex() {
    assert_eq!(
        vec![
            "x".span(0),
            Assign.span(2),
            Regex("42".into(), "i".into()).span(4..9),
        ],
        lex("x = /42/i")
    );
    assert_eq!(vec![Regex("42".into(), "".into()).span(0..4)], lex("/42/"));
}
// After a function *declaration*'s `}`, `/` starts a regex.
#[test]
fn complex_regex() {
    assert_eq!(
        vec![
            Word(Ident("f".into())),
            LParen,
            RParen,
            Semi,
            Word(Keyword(Function)),
            Word(Ident("foo".into())),
            LParen,
            RParen,
            LBrace,
            RBrace,
            Regex("42".into(), "i".into()),
        ],
        lex_tokens("f(); function foo() {} /42/i"),
        "/ should be parsed as regexp"
    )
}
// After an identifier, `/` is division.
#[test]
fn simple_div() {
    assert_eq!(vec!["a".span(0), Div.span(2), "b".span(4)], lex("a / b"));
}
// After a function *expression*'s `}`, `/` is division.
#[test]
fn complex_divide() {
    assert_eq!(
        vec![
            Word(Ident("x".into())),
            AssignOp(Assign),
            Word(Keyword(Function)),
            Word(Ident("foo".into())),
            LParen,
            RParen,
            LBrace,
            RBrace,
            BinOp(Div),
            Word(Ident("a".into())),
            BinOp(Div),
            Word(Ident("i".into())),
        ],
        lex_tokens("x = function foo() {} /a/i"),
        "/ should be parsed as div operator"
    )
}
// ---------- Tests from tc39 spec
// Division on a following line lexes identically to the one-line form
// (no regex after an identifier, even across a line break).
#[test]
fn spec_001() {
    let expected = vec![
        Word(Ident("a".into())),
        AssignOp(Assign),
        Word(Ident("b".into())),
        BinOp(Div),
        Word(Ident("hi".into())),
        BinOp(Div),
        Word(Ident("g".into())),
        Dot,
        Word(Ident("exec".into())),
        LParen,
        Word(Ident("c".into())),
        RParen,
        Dot,
        Word(Ident("map".into())),
        LParen,
        Word(Ident("d".into())),
        RParen,
        Semi,
    ];
    assert_eq!(
        expected,
        lex_tokens(
            "a = b
/hi/g.exec(c).map(d);"
        )
    );
    assert_eq!(expected, lex_tokens("a = b / hi / g.exec(c).map(d);"));
}
// ---------- Tests ported from esprima
// After an `if`-statement's `}`, `/` starts a regex.
#[test]
fn after_if() {
    assert_eq!(
        vec![
            Keyword::If.span(0..2),
            LParen.span(2),
            "x".span(3),
            RParen.span(4),
            LBrace.span(5),
            RBrace.span(6),
            Regex("y".into(), "".into()).span(8..11),
            Dot.span(11),
            "test".span(12..16),
            LParen.span(16),
            "z".span(17),
            RParen.span(18),
        ],
        lex("if(x){} /y/.test(z)"),
    )
}
// Empty input yields no tokens.
#[test]
fn empty() {
    assert_eq!(lex(""), vec![]);
}
// Placeholder: error reporting for invalid numbers is not implemented yet.
#[test]
#[ignore]
fn invalid_number_failure() {
    unimplemented!()
}
// #[test]
// #[ignore]
// fn leading_comment() {
// assert_eq!(
// vec![
// BlockComment(" hello world ".into()).span(0..17),
// Regex("42".into(), "".into()).span(17..21),
// ],
// lex("/* hello world */ /42/")
// )
// }
// #[test]
// #[ignore]
// fn line_comment() {
// assert_eq!(
// vec![
// Keyword::Var.span(0..3),
// "answer".span(4..10),
// Assign.span(11),
// 42.span(13..15),
// LineComment(" the Ultimate".into()).span(17..32),
// ],
// lex("var answer = 42 // the Ultimate"),
// )
// }
#[test]
fn migrated_0002() {
    // A regex in call-argument position: `(` is not an expression end,
    // so `/42/` must lex as a regex literal.
    let expected = vec![
        "tokenize".span(0..8),
        LParen.span(8),
        Regex("42".into(), "".into()).span(9..13),
        RParen.span(13),
    ];
    assert_eq!(expected, lex("tokenize(/42/)"))
}
#[test]
fn migrated_0003() {
    // `(false)` ends in an expression, so the following slashes are
    // division operators, not a regex.
    let expected = vec![
        LParen.span(0),
        Word::False.span(1..6),
        RParen.span(6),
        Div.span(8),
        42.span(9..11),
        Div.span(11),
    ];
    assert_eq!(expected, lex("(false) /42/"))
}
#[test]
fn migrated_0004() {
    // After a function *declaration*, `/42/` is a regex (compare with
    // `complex_divide`, where the function is an expression).
    let expected = vec![
        Function.span(0..8),
        "f".span(9),
        LParen.span(10),
        RParen.span(11),
        LBrace.span(12),
        RBrace.span(13),
        Regex("42".into(), "".into()).span(15..19),
    ];
    assert_eq!(expected, lex("function f(){} /42/"));
}
// This test seems wrong.
//
// #[test]
// fn migrated_0005() {
// assert_eq!(
// vec![
// Function.span(0..8),
// LParen.span(9),
// RParen.span(10),
// LBrace.span(11),
// RBrace.span(12),
// Div.span(13),
// 42.span(14..16),
// ],
// lex("function (){} /42")
// );
// }
#[test]
fn migrated_0006() {
    // The original esprima test expected `{} /42` to lex as a division,
    // which seems wrong: `{}` here is a block statement, so the following
    // `/42/` is a regex literal.
    let expected = vec![
        LBrace.span(0),
        RBrace.span(1),
        Regex("42".into(), "".into()).span(3..7),
    ];
    assert_eq!(expected, lex("{} /42/"))
}
#[test]
fn str_lit() {
    // The second field of `Str` records whether the literal used double
    // quotes; a backslash-newline inside a string is a line continuation.
    assert_eq!(vec![Str("abcde".into(), false)], lex_tokens("'abcde'"));
    assert_eq!(vec![Str("abcde".into(), true)], lex_tokens(r#""abcde""#));
    assert_eq!(vec![Str("abc".into(), false)], lex_tokens("'\\\nabc'"));
}
#[test]
fn tpl_empty() {
    // An empty template still produces one (empty) quasi between the
    // backquote tokens.
    let expected = vec![tok!('`'), Template("".into()), tok!('`')];
    assert_eq!(lex_tokens(r#"``"#), expected)
}
#[test]
fn tpl() {
    // `${a}` splits the template into two empty quasis around the
    // interpolated expression.
    let expected = vec![
        tok!('`'),
        Template("".into()),
        tok!("${"),
        Word(Ident("a".into())),
        tok!('}'),
        Template("".into()),
        tok!('`'),
    ];
    assert_eq!(lex_tokens(r#"`${a}`"#), expected)
}

View File

@ -0,0 +1,175 @@
//! Ported from [babylon/util/identifier.js][]
//!
//!
//! [babylon/util/identifier.js]:\
//! https://github.com/babel/babel/blob/master/packages/babylon/src/util/identifier.js
//!
//!
//! Note: Currently this use xid instead of id. (because unicode_xid crate
//! exists)
use super::Lexer;
use super::input::Input;
use parser_macros::parser;
use unicode_xid::UnicodeXID;
/// Backspace control character (U+0008).
pub const BACKSPACE: char = '\u{0008}';
/// Shift-out control character (U+000E).
pub const SHIFT_OUT: char = '\u{000E}';
/// Ogham space mark (U+1680), a Unicode `Space_Separator` code point.
pub const OGHAM_SPACE_MARK: char = '\u{1680}'; // ' '
/// Line feed (U+000A).
pub const LINE_FEED: char = '\n';
/// Line separator (U+2028); a line terminator in ECMAScript.
pub const LINE_SEPARATOR: char = '\u{2028}';
/// Paragraph separator (U+2029); a line terminator in ECMAScript.
pub const PARAGRAPH_SEPARATOR: char = '\u{2029}';
#[parser]
impl<I: Input> Lexer<I> {
    /// Skip comments or whitespaces.
    ///
    /// See https://tc39.github.io/ecma262/#sec-white-space
    pub(super) fn skip_space(&mut self) {
        // NOTE: the previous revision declared an unused `line_break` local
        // here; line-break state lives in `self.state.had_line_break`.
        while let Some(c) = cur!() {
            match c {
                // white spaces
                _ if c.is_ws() => {}
                // line breaks
                _ if c.is_line_break() => {
                    self.state.had_line_break = true;
                }

                '/' => {
                    if peek!() == Some('/') {
                        // `skip_line_comment` consumes the characters itself,
                        // so skip the trailing `bump!()` below.
                        self.skip_line_comment(2);
                        continue;
                    } else if peek!() == Some('*') {
                        self.skip_block_comment();
                        continue;
                    }
                    break;
                }

                _ => break,
            }

            bump!();
        }
    }

    /// Skip a line comment; `start_skip` is the length of the comment
    /// opener already known to be present (`//` = 2, `#!` = 2, ...).
    pub(super) fn skip_line_comment(&mut self, start_skip: usize) {
        // `start` is kept for the span of the comment once comments are
        // actually recorded (see TODO below).
        let start = cur_pos!();
        for _ in 0..start_skip {
            bump!();
        }

        while let Some(c) = cur!() {
            bump!();
            if c.is_line_break() {
                self.state.had_line_break = true;
            }
            match c {
                '\n' | '\r' | '\u{2028}' | '\u{2029}' => {
                    break;
                }
                _ => {}
            }
        }

        // TODO: push comment
    }

    /// Expects current char to be '/' and next char to be '*'.
    pub(super) fn skip_block_comment(&mut self) {
        // `start` is kept for the span of the comment once comments are
        // actually recorded (see TODO below).
        let start = cur_pos!();

        debug_assert_eq!(cur!(), Some('/'));
        debug_assert_eq!(peek!(), Some('*'));

        bump!();
        bump!();

        // `was_star` is true when the previous consumed char was `*`, so a
        // following `/` terminates the comment.
        let mut was_star = false;

        while let Some(c) = cur!() {
            if was_star && is!('/') {
                bump!();
                // TODO: push comment
                return;
            }
            if c.is_line_break() {
                self.state.had_line_break = true;
            }

            was_star = is!('*');
            bump!();
        }

        unimplemented!("error: unterminated block comment");
    }
}
/// Implemented for `char`.
pub trait CharExt: Copy {
    /// Converts `self` to a unicode scalar value, if it is one.
    fn to_char(self) -> Option<char>;

    /// Test whether a given character code starts an identifier.
    ///
    /// https://tc39.github.io/ecma262/#prod-IdentifierStart
    fn is_ident_start(self) -> bool {
        // TODO: Use Unicode ID instead of XID.
        self.to_char()
            .map_or(false, |c| c == '$' || c == '_' || UnicodeXID::is_xid_start(c))
    }

    /// Test whether a given character is part of an identifier.
    fn is_ident_part(self) -> bool {
        // TODO: Use Unicode ID instead of XID.
        self.to_char().map_or(false, |c| {
            c == '$' || c == '_' || c == '\u{200c}' || c == '\u{200d}'
                || UnicodeXID::is_xid_continue(c)
        })
    }

    /// See https://tc39.github.io/ecma262/#sec-line-terminators
    fn is_line_break(self) -> bool {
        match self.to_char() {
            Some('\r') | Some('\n') | Some('\u{2028}') | Some('\u{2029}') => true,
            _ => false,
        }
    }

    /// See https://tc39.github.io/ecma262/#sec-white-space
    fn is_ws(self) -> bool {
        let c = match self.to_char() {
            Some(c) => c,
            None => return false,
        };
        match c {
            // Explicitly listed white space code points.
            '\u{0009}' | '\u{000b}' | '\u{000c}' | '\u{0020}' | '\u{00a0}' | '\u{feff}' => true,
            // Any other Unicode "Space_Separator" code point.
            '\u{1680}'
            | '\u{180e}'
            | '\u{2000}'...'\u{200a}'
            | '\u{202f}'
            | '\u{205f}'
            | '\u{3000}' => true,
            _ => false,
        }
    }
}
impl CharExt for char {
    /// A `char` is already a unicode scalar value, so this always succeeds.
    #[inline(always)]
    fn to_char(self) -> Option<char> {
        Some(self)
    }
}

View File

@ -0,0 +1,36 @@
#![feature(box_syntax)]
#![feature(box_patterns)]
#![feature(const_fn)]
#![feature(specialization)]
#![feature(never_type)]
#![feature(nll)]
#![feature(proc_macro)]
#![feature(try_from)]
#![feature(try_trait)]
#![cfg_attr(test, feature(conservative_impl_trait))]
#![deny(unreachable_patterns)]
#![deny(unsafe_code)]
extern crate either;
#[macro_use]
extern crate failure;
extern crate parser_macros;
#[macro_use]
extern crate slog;
#[macro_use(js_word)]
extern crate swc_atoms;
extern crate swc_common;
#[macro_use]
extern crate swc_macros;
#[cfg(test)]
#[macro_use]
extern crate testing;
extern crate unicode_xid;
pub extern crate swc_ecma_ast as ast;
#[macro_use]
mod macros;
pub mod error;
pub mod lexer;
pub mod token;
pub mod parser;

View File

@ -0,0 +1,117 @@
/// Maps a token literal to its `Token` value so parser code can write
/// `tok!('(')` instead of spelling out the enum variant.
///
/// `';'` is deliberately not handled here (see the commented-out arm);
/// use `token_including_semi!` where a semicolon must also be accepted.
macro_rules! tok {
    ('`') => { Token::BackQuote };
    // (';') => { Token::Semi };
    (',') => { Token::Comma };
    ('?') => { Token::QuestionMark };
    (':') => { Token::Colon };
    ("::") => { Token::ColonColon };
    ('.') => { Token::Dot };
    ("=>") => { Token::Arrow };
    ("...") => { Token::DotDotDot };
    ("${") => { Token::DollarLBrace };

    ('+') => { Token::BinOp(Add) };
    ('-') => { Token::BinOp(Sub) };
    ('*') => { Token::BinOp(Mul) };
    ('/') => { Token::BinOp(Div) };
    ("/=") => { Token::AssignOp(DivAssign) };
    ('%') => { Token::BinOp(Mod) };
    ('!') => { Token::Bang };
    ('~') => { Token::Tilde };
    ('<') => { Token::BinOp(Lt) };
    ('>') => { Token::BinOp(Gt) };
    ("++") => { Token::PlusPlus };
    ("--") => { Token::MinusMinus };
    ('=') => { Token::AssignOp(Assign) };

    ('(') => { Token::LParen };
    (')') => { Token::RParen };
    ('{') => { Token::LBrace };
    ('}') => { Token::RBrace };
    ('[') => { Token::LBracket };
    (']') => { Token::RBracket };

    // Contextual keywords lex as plain identifiers.
    ("async") => { Token::Word(Word::Ident(js_word!("async"))) };
    ("as") => { Token::Word(Word::Ident(js_word!("as"))) };
    ("await") => { Token::Word(Keyword(Await)) };
    ("case") => { Token::Word(Keyword(Case)) };
    ("catch") => { Token::Word(Keyword(Catch)) };
    ("class") => { Token::Word(Keyword(Class)) };
    ("const") => { Token::Word(Keyword(Const)) };
    ("default") => { Token::Word(Keyword(Default_)) };
    ("delete") => { Token::Word(Keyword(Delete)) };
    ("do") => { Token::Word(Keyword(Do)) };
    ("else") => { Token::Word(Keyword(Else)) };
    ("export") => { Token::Word(Keyword(Export)) };
    ("extends") => { Token::Word(Keyword(Extends)) };
    ("false") => { Token::Word(False) };
    ("finally") => { Token::Word(Keyword(Finally)) };
    ("for") => { Token::Word(Keyword(For)) };
    ("from") => { Token::Word(Word::Ident(js_word!("from"))) };
    ("function") => { Token::Word(Keyword(Function)) };
    ("if") => { Token::Word(Keyword(If)) };
    ("in") => { Token::Word(Keyword(In)) };
    ("import") => { Token::Word(Keyword(Import)) };
    ("let") => { Token::Word(Keyword(Let)) };
    ("new") => { Token::Word(Keyword(New)) };
    ("null") => { Token::Word(Null) };
    ("of") => { Token::Word(Ident(js_word!("of"))) };
    ("return") => { Token::Word(Keyword(Return)) };
    ("super") => { Token::Word(Keyword(Super)) };
    ("static") => { Token::Word(Word::Ident(js_word!("static"))) };
    ("switch") => { Token::Word(Keyword(Switch)) };
    ("target") => { Token::Word(Word::Ident(js_word!("target"))) };
    ("this") => { Token::Word(Keyword(This)) };
    ("throw") => { Token::Word(Keyword(Throw)) };
    ("true") => { Token::Word(True) };
    ("try") => { Token::Word(Keyword(Try)) };
    ("typeof") => { Token::Word(Keyword(TypeOf)) };
    ("var") => { Token::Word(Keyword(Var)) };
    ("void") => { Token::Word(Keyword(Void)) };
    ("while") => { Token::Word(Keyword(While)) };
    ("with") => { Token::Word(Keyword(With)) };
    ("yield") => { Token::Word(Keyword(Yield)) };
}
/// Like `tok!`, but also accepts `';'` (which `tok!` deliberately omits).
macro_rules! token_including_semi {
    (';') => { Token::Semi };
    ($t:tt) => { tok!($t) };
}
/// This macro requires macro named 'last_pos' to be in scope.
///
/// Builds a `Span` running from `$start` up to the parser's last
/// consumed position.
macro_rules! span {
    ($p:expr, $start:expr) => {
        Span { start: $start, end: last_pos!($p), }
    };
}
/// Takes `(parser, start)`, Returns |t| { Spanned::from }
///
/// The produced closure wraps an unspanned node with the span running
/// from `$start` to the parser's last consumed position. `$start` is
/// captured eagerly; `last_pos!` is read when the closure runs.
macro_rules! into_spanned {
    ($p:expr, $start:expr) => {{
        |val| {
            let start = $start;
            let end = last_pos!($p);
            return ::swc_common::Spanned::from_unspanned(val, Span { start, end });
        }
    }}
}
/// Records the current position, evaluates `$body` (which must produce a
/// `Result`), and wraps the `Ok` value with the span covering everything
/// consumed by `$body`.
macro_rules! spanned {
    (
        $p:expr, { $($body:tt)* }
    ) => {{
        let start = cur_pos!($p);
        let val: Result<_, _> = {
            $($body)*
        };
        // `$body` may `return` early, making the wrapping below unreachable.
        #[allow(unreachable_code)]
        {
            val.map(into_spanned!($p, start))
        }
    }};
}

View File

@ -0,0 +1,402 @@
//! Parser for function expression and function declaration.
use super::*;
use super::ident::MaybeOptionalIdentParser;
#[parser]
impl<I: Input> Parser<I> {
    /// Parse an async function expression; the cursor must be on `async`.
    pub(super) fn parse_async_fn_expr(&mut self) -> PResult<Box<Expr>> {
        let start = cur_pos!();
        expect!("async");
        self.parse_fn(Some(start))
    }

    /// Parse function expression
    pub(super) fn parse_fn_expr(&mut self) -> PResult<Box<Expr>> {
        self.parse_fn(None)
    }

    /// Parse an async function declaration; the cursor must be on `async`.
    pub(super) fn parse_async_fn_decl(&mut self) -> PResult<Decl> {
        let start = cur_pos!();
        expect!("async");
        self.parse_fn(Some(start))
    }

    /// Parse a function declaration.
    pub(super) fn parse_fn_decl(&mut self) -> PResult<Decl> {
        self.parse_fn(None)
    }

    /// Parse `async function` in `export default` position.
    pub(super) fn parse_default_async_fn(&mut self) -> PResult<ExportDefaultDecl> {
        let start = cur_pos!();
        expect!("async");
        self.parse_fn(Some(start))
    }

    /// Parse `function` in `export default` position.
    pub(super) fn parse_default_fn(&mut self) -> PResult<ExportDefaultDecl> {
        self.parse_fn(None)
    }

    /// Parse a class declaration.
    pub(super) fn parse_class_decl(&mut self) -> PResult<Decl> {
        self.parse_class()
    }

    /// Parse a class expression.
    pub(super) fn parse_class_expr(&mut self) -> PResult<Box<Expr>> {
        self.parse_class()
    }

    /// Parse `class` in `export default` position.
    pub(super) fn parse_default_class(&mut self) -> PResult<ExportDefaultDecl> {
        self.parse_class()
    }

    /// Shared implementation behind the `parse_class_*` entry points.
    /// `T` (expr / decl / export-default) decides whether the class name
    /// is optional, via `MaybeOptionalIdentParser`.
    fn parse_class<T>(&mut self) -> PResult<T>
    where
        T: OutputType,
        Self: MaybeOptionalIdentParser<T::Ident>,
    {
        let start = cur_pos!();
        expect!("class");

        let ident = self.parse_maybe_opt_binding_ident()?;

        let super_class = if eat!("extends") {
            self.parse_lhs_expr().map(Some)?
        } else {
            None
        };

        expect!('{');
        let body = self.parse_class_body()?;
        expect!('}');
        let end = last_pos!();
        Ok(T::finish_class(
            ident,
            Class {
                span: Span { start, end },
                super_class,
                body,
            },
        ))
    }

    /// Parse class members until `}`; stray `;` between members is skipped.
    fn parse_class_body(&mut self) -> PResult<Vec<ClassMethod>> {
        let mut elems = vec![];
        while !eof!() && !is!('}') {
            if eat_exact!(';') {
                continue;
            }
            elems.push(self.parse_class_element()?);
        }
        Ok(elems)
    }

    /// Parse one class member, handling an optional leading `static`.
    fn parse_class_element(&mut self) -> PResult<ClassMethod> {
        // ignore semi

        let start_of_static = {
            let pos = cur_pos!();
            if eat!("static") {
                Some(pos)
            } else {
                None
            }
        };

        self.parse_method_def(start_of_static)
    }

    /// Shared implementation behind the function entry points.
    /// `start_of_async` is `Some` iff an `async` keyword was already eaten
    /// by the caller (its position becomes the function's span start).
    fn parse_fn<T>(&mut self, start_of_async: Option<BytePos>) -> PResult<T>
    where
        T: OutputType,
        Self: MaybeOptionalIdentParser<T::Ident>,
    {
        let start = start_of_async.unwrap_or(cur_pos!());
        assert_and_bump!("function");
        let is_async = start_of_async.is_some();

        if is_async && is!('*') {
            // `async function*` (async generators) is rejected.
            syntax_error!(SyntaxError::AsyncGenerator);
        }
        let is_generator = eat!('*');

        let ident = self.parse_maybe_opt_binding_ident()?;

        expect!('(');
        let params = self.parse_formal_params()?;
        expect!(')');

        let body = self.parse_fn_body(is_async, is_generator)?;

        Ok(T::finish_fn(
            ident,
            Function {
                span: Span {
                    start,
                    end: last_pos!(),
                },
                is_async,
                is_generator,
                params,
                body,
            },
        ))
    }

    /// `parse_args` closure should not eat '(' or ')'.
    pub(super) fn parse_fn_args_body<F>(
        &mut self,
        start: BytePos,
        parse_args: F,
        is_async: bool,
        is_generator: bool,
    ) -> PResult<Function>
    where
        F: FnOnce(&mut Self) -> PResult<Vec<Pat>>,
    {
        // Params and body are parsed in the async/generator context of the
        // function itself, not of the enclosing code.
        self.with_ctx(Context {
            in_async: is_async,
            in_generator: is_generator,
            ..self.ctx
        }).parse_with(|mut p| {
            expect!(p, '(');
            let params = parse_args(&mut p)?;
            expect!(p, ')');

            let body = p.parse_fn_body(is_async, is_generator)?;
            Ok(Function {
                span: span!(p, start),
                params,
                body,
                is_async,
                is_generator,
            })
        })
    }

    /// Parse a method definition (plain / generator / getter / setter /
    /// async), optionally prefixed by `static`.
    fn parse_method_def(&mut self, start_of_static: Option<BytePos>) -> PResult<ClassMethod> {
        let is_static = start_of_static.is_some();
        let start = start_of_static.unwrap_or(cur_pos!());

        // `*name(){}` — generator method.
        if eat!('*') {
            let key = self.parse_prop_name()?;
            return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, false, true)
                .map(|function| ClassMethod {
                    is_static,
                    key,
                    function,
                    kind: ClassMethodKind::Method,
                });
        }

        // Handle static(){}
        if let Some(start_of_static) = start_of_static {
            if is!('(') {
                // `static` was actually the method *name*, so the member is
                // not static.
                let span_of_static = span!(start_of_static);
                return self.parse_fn_args_body(
                    start,
                    Parser::parse_unique_formal_params,
                    false,
                    false,
                ).map(|function| ClassMethod {
                    is_static: false,
                    key: PropName::Ident(Ident {
                        span: span_of_static,
                        sym: js_word!("static"),
                    }),
                    function,
                    kind: ClassMethodKind::Method,
                });
            }
        }

        let key = self.parse_prop_name()?;
        // Handle `a(){}` (and async(){} / get(){} / set(){})
        if is!('(') {
            return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, false, false)
                .map(|function| ClassMethod {
                    is_static,
                    key,
                    function,
                    kind: ClassMethodKind::Method,
                });
        }

        // If the key isn't followed by `(`, it must be the `get`/`set`/`async`
        // modifier, which is only possible when the key is an identifier.
        let ident = match key {
            PropName::Ident(ident) => ident,
            _ => unexpected!(),
        };

        // get a(){}
        // set a(v){}
        // async a(){}
        match ident.sym {
            js_word!("get") | js_word!("set") | js_word!("async") => {
                let key = self.parse_prop_name()?;

                return match ident.sym {
                    // Getters take no parameters.
                    js_word!("get") => self.parse_fn_args_body(start, |_| Ok(vec![]), false, false)
                        .map(|function| ClassMethod {
                            is_static,
                            key,
                            function,
                            kind: ClassMethodKind::Getter,
                        }),
                    // Setters take exactly one parameter.
                    js_word!("set") => self.parse_fn_args_body(
                        start,
                        |p| p.parse_formal_param().map(|pat| vec![pat]),
                        false,
                        false,
                    ).map(|function| ClassMethod {
                        key,
                        is_static,
                        function,
                        kind: ClassMethodKind::Setter,
                    }),
                    js_word!("async") => self.parse_fn_args_body(
                        start,
                        Parser::parse_unique_formal_params,
                        true,
                        false,
                    ).map(|function| ClassMethod {
                        is_static,
                        key,
                        function,
                        kind: ClassMethodKind::Method,
                    }),
                    _ => unreachable!(),
                };
            }
            _ => unexpected!(),
        }
    }

    /// Parse a function body in the async/generator context of the
    /// function being parsed. The body type `T` is selected by the caller
    /// (block for normal functions, block-or-expr for arrows).
    pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<T>
    where
        Self: FnBodyParser<T>,
    {
        self.with_ctx(Context {
            in_async: is_async,
            in_generator: is_generator,
            ..self.ctx
        }).parse_fn_body_inner()
    }
}
/// Output of `parse_fn` / `parse_class`: an expression, a declaration, or
/// an `export default` declaration, each with its own name requirement.
trait OutputType {
    /// `Ident` for declarations (name required), `Option<Ident>` otherwise.
    type Ident;

    fn finish_fn(ident: Self::Ident, f: Function) -> Self;
    fn finish_class(ident: Self::Ident, class: Class) -> Self;
}
/// Function/class *expressions*: the name is optional.
impl OutputType for Box<Expr> {
    type Ident = Option<Ident>;

    fn finish_fn(ident: Option<Ident>, function: Function) -> Self {
        box Expr {
            span: function.span,
            node: ExprKind::Function(FnExpr { ident, function }),
        }
    }
    fn finish_class(ident: Option<Ident>, class: Class) -> Self {
        box Expr {
            span: class.span,
            node: ExprKind::Class(ClassExpr { ident, class }),
        }
    }
}
/// `export default function/class`: the name is optional.
impl OutputType for ExportDefaultDecl {
    type Ident = Option<Ident>;

    fn finish_fn(ident: Option<Ident>, function: Function) -> Self {
        ExportDefaultDecl::Fn { ident, function }
    }
    fn finish_class(ident: Option<Ident>, class: Class) -> Self {
        ExportDefaultDecl::Class { ident, class }
    }
}
/// Function/class *declarations*: the name is mandatory.
impl OutputType for Decl {
    type Ident = Ident;

    fn finish_fn(ident: Ident, function: Function) -> Self {
        Decl::Fn { ident, function }
    }
    fn finish_class(ident: Ident, class: Class) -> Self {
        Decl::Class(ClassDecl { ident, class })
    }
}
/// Parses a function body; the impl is selected by the expected body type
/// (`BlockStmt` for ordinary functions, `BlockStmtOrExpr` for arrows).
pub(super) trait FnBodyParser<Body> {
    fn parse_fn_body_inner(&mut self) -> PResult<Body>;
}
#[parser]
impl<I: Input> FnBodyParser<BlockStmtOrExpr> for Parser<I> {
    /// Arrow-function body: a block if it starts with `{`, otherwise a
    /// single assignment expression.
    fn parse_fn_body_inner(&mut self) -> PResult<BlockStmtOrExpr> {
        if is!('{') {
            self.parse_block().map(BlockStmtOrExpr::BlockStmt)
        } else {
            self.parse_assignment_expr().map(BlockStmtOrExpr::Expr)
        }
    }
}
impl<I: Input> FnBodyParser<BlockStmt> for Parser<I> {
    /// Ordinary function body: always a block statement.
    fn parse_fn_body_inner(&mut self) -> PResult<BlockStmt> {
        self.parse_block()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lexer::Lexer;

    /// Build a module-mode parser over `s` with a test logger.
    fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
        let logger = ::testing::logger().new(o!("src" => s));
        Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
    }

    /// Parse `s` as a left-hand-side expression.
    fn lhs(s: &'static str) -> Box<Expr> {
        mk(s)
            .parse_lhs_expr()
            .expect("failed to parse lhs expression")
    }

    /// Parse `s` as a full expression.
    fn expr(s: &'static str) -> Box<Expr> {
        mk(s).parse_expr().expect("failed to parse an expression")
    }

    // Dummy span used with `assert_eq_ignore_span!`; spans are not compared.
    #[allow(non_upper_case_globals)]
    const span: Span = Span::DUMMY;

    #[test]
    fn class_expr() {
        assert_eq_ignore_span!(
            expr("(class extends a {})"),
            box Expr {
                span,
                node: ExprKind::Paren(box Expr {
                    span,
                    node: ExprKind::Class(ClassExpr {
                        ident: None,
                        class: Class {
                            span,
                            body: vec![],
                            super_class: Some(expr("a")),
                        },
                    }),
                }),
            }
        );
    }
}

View File

@ -0,0 +1,638 @@
use super::*;
use std::iter;
mod ops;
#[cfg(test)]
mod tests;
#[parser]
impl<I: Input> Parser<I> {
pub fn parse_expr(&mut self) -> PResult<Box<Expr>> {
let expr = self.parse_assignment_expr()?;
let start = expr.span.start;
if is!(',') {
let mut exprs = vec![expr];
while eat!(',') {
exprs.push(self.parse_assignment_expr()?);
}
let end = exprs.last().unwrap().span.end;
return Ok(box Expr {
span: Span { start, end },
node: ExprKind::Seq { exprs },
});
}
Ok(expr)
}
/// Parse an assignment expression. This includes applications of
/// operators like `+=`.
///
pub(super) fn parse_assignment_expr(&mut self) -> PResult<Box<Expr>> {
if self.ctx.in_generator && is!("yield") {
return self.parse_yield_expr();
}
let start = cur_pos!();
self.state.potential_arrow_start = match *cur!()? {
Word(Ident(..)) | tok!('(') | tok!("yield") => Some(start),
_ => None,
};
// self.parse_arrow_fn();
// self.parse_async_arrow_fn();
// Try to parse conditional expression.
let cond = self.parse_cond_expr()?;
return_if_arrow!(cond);
match cond.node {
// if cond is conditional expression but not left-hand-side expression,
// just return it.
ExprKind::Cond { .. }
| ExprKind::Binary { .. }
| ExprKind::Unary { .. }
| ExprKind::Update { .. } => return Ok(cond),
_ => {}
}
match cur!() {
Some(&AssignOp(op)) => {
bump!();
let right = self.parse_assignment_expr()?;
Ok(box Expr {
span: span!(start),
node: ExprKind::Assign {
op,
// TODO:
left: PatOrExpr::Expr(cond),
right,
},
})
}
_ => Ok(cond),
}
}
/// Spec: 'ConditionalExpression'
fn parse_cond_expr(&mut self) -> PResult<Box<Expr>> {
spanned!({
let test = self.parse_bin_expr()?;
return_if_arrow!(test);
if eat!('?') {
let cons = self.include_in_expr(true).parse_assignment_expr()?;
expect!(':');
let alt = self.parse_assignment_expr()?;
Ok(ExprKind::Cond { test, cons, alt })
} else {
return Ok(test);
}
})
}
/// Parse a primary expression or arrow function
fn parse_primary_expr(&mut self) -> PResult<Box<Expr>> {
let can_be_arrow = self.state
.potential_arrow_start
.map(|s| s == cur_pos!())
.unwrap_or(false);
// debug!(
// self.logger,
// "Parsing a primary expression. cur={:?} can_be_arrow={}",
// cur!(),
// can_be_arrow
// );
let t = cur!()?;
match *t {
tok!("this") => {
return spanned!({
assert_and_bump!("this");
Ok(ExprKind::This)
});
}
tok!("async") => {
// Handle async function expression
if peeked_is!("function") && !self.input.has_linebreak_between_cur_and_peeked() {
return self.parse_async_fn_expr();
}
}
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => {
return spanned!({ self.parse_lit().map(ExprKind::Lit) })
}
tok!('[') => return self.parse_array_lit(),
tok!('{') => return self.parse_object(),
// Handle FunctionExpression and GeneratorExpression
tok!("function") => return self.parse_fn_expr(),
tok!("class") => return self.parse_class_expr(),
Regex(_, _) => {
return spanned!({
match bump!() {
Regex(exp, flags) => Ok(ExprKind::Lit(Lit::Regex(Regex { exp, flags }))),
_ => unreachable!(),
}
});
}
tok!('`') => {
return spanned!({
// parse template literal
Ok(ExprKind::Tpl(self.parse_tpl_lit(None)?))
});
}
tok!('(') => {
return self.parse_paren_expr_or_arrow_fn(can_be_arrow);
}
_ => {}
}
if is!("let") || is!(IdentRef) {
return spanned!({
// TODO: Handle [Yield, Await]
let id = self.parse_ident_ref()?;
if can_be_arrow && id.sym == js_word!("async") && is!(BindingIdent) {
// async a => body
let arg = self.parse_binding_ident().map(Pat::from)?;
let params = vec![arg];
expect!("=>");
let body = self.parse_fn_body(true, false)?;
Ok(ExprKind::Arrow {
body,
params,
is_async: true,
is_generator: false,
})
} else if can_be_arrow && !is!(';') && eat!("=>") {
// async is parameter
let params = vec![id.into()];
let body = self.parse_fn_body(false, false)?;
Ok(ExprKind::Arrow {
body,
params,
is_async: false,
is_generator: false,
})
} else {
return Ok(id.into());
}
});
}
unexpected!()
}
fn parse_array_lit(&mut self) -> PResult<Box<Expr>> {
spanned!({
assert_and_bump!('[');
let mut elems = vec![];
let mut comma = 0;
while !eof!() && !is!(']') {
if eat!(',') {
comma += 1;
continue;
}
elems.extend(iter::repeat(None).take(comma));
comma = 0;
elems.push(self.include_in_expr(true).parse_expr_or_spread().map(Some)?);
}
expect!(']');
Ok(ExprKind::Array { elems })
})
}
fn parse_member_expr(&mut self) -> PResult<Box<Expr>> {
self.parse_member_expr_or_new_expr(false)
}
/// `is_new_expr`: true iff we are parsing production 'NewExpression'.
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<Box<Expr>> {
let start = cur_pos!();
if eat!("new") {
let span_of_new = span!(start);
if eat!('.') {
let start_of_target = cur_pos!();
if eat!("target") {
return Ok(box Expr {
span: span!(start),
node: ExprKind::MetaProp {
meta: Ident {
span: span_of_new,
sym: js_word!("new"),
},
prop: Ident {
span: span!(start_of_target),
sym: js_word!("target"),
},
},
});
}
unexpected!()
}
// 'NewExpression' allows new call without paren.
let callee = self.parse_member_expr_or_new_expr(is_new_expr)?;
if !is_new_expr || is!('(') {
// Parsed with 'MemberExpression' production.
let args = self.parse_args().map(Some)?;
// We should parse subscripts for MemberExpression.
return self.parse_subscripts(
ExprOrSuper::Expr(box Expr {
span: span!(start),
node: ExprKind::New { callee, args },
}),
true,
);
}
// Parsed with 'NewExpression' production.
return Ok(box Expr {
span: span!(start),
node: ExprKind::New { callee, args: None },
});
}
if eat!("super") {
let base = ExprOrSuper::Super(span!(start));
return self.parse_subscripts(base, true);
}
let obj = self.parse_primary_expr().map(ExprOrSuper::Expr)?;
self.parse_subscripts(obj, true)
}
/// Parse `NewExpresion`.
/// This includes `MemberExpression`.
fn parse_new_expr(&mut self) -> PResult<Box<Expr>> {
self.parse_member_expr_or_new_expr(true)
}
/// Parse `Arguments[Yield, Await]`
pub(super) fn parse_args(&mut self) -> PResult<Vec<ExprOrSpread>> {
expect!('(');
let mut first = true;
let mut expr_or_spreads = vec![];
while !eof!() && !is!(')') {
if first {
first = false;
} else {
expect!(',');
// Handle trailing comma.
if is!(')') {
break;
}
}
expr_or_spreads.push(self.include_in_expr(true).parse_expr_or_spread()?);
}
expect!(')');
Ok(expr_or_spreads)
}
/// AssignmentExpression[+In, ?Yield, ?Await]
/// ...AssignmentExpression[+In, ?Yield, ?Await]
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<ExprOrSpread> {
if eat!("...") {
self.include_in_expr(true)
.parse_assignment_expr()
.map(ExprOrSpread::Spread)
} else {
self.parse_assignment_expr().map(ExprOrSpread::Expr)
}
}
/// Parse paren expression or arrow function expression.
///
fn parse_paren_expr_or_arrow_fn(&mut self, can_be_arrow: bool) -> PResult<Box<Expr>> {
let start = cur_pos!();
// At this point, we can't know if it's parenthesized
// expression or head of arrow function.
// But as all patterns of javascript is subset of
// expressions, we can parse both as expression.
let expr_or_spreads = self.include_in_expr(true).parse_args_or_pats()?;
// we parse arrow function at here, to handle it efficiently.
if is!("=>") {
if !can_be_arrow {
unexpected!();
}
assert_and_bump!("=>");
let params = self.parse_exprs_as_params(expr_or_spreads)?;
let body: BlockStmtOrExpr = self.parse_fn_body(false, false)?;
return Ok(box Expr {
span: span!(start),
node: ExprKind::Arrow {
is_async: false,
is_generator: false,
params,
body,
},
});
}
// It was not head of arrow function.
// ParenthesizedExpression cannot contain spread.
if expr_or_spreads.len() == 0 {
syntax_error!(SyntaxError::EmptyParenExpr)
} else if expr_or_spreads.len() == 1 {
let expr = match expr_or_spreads.into_iter().next().unwrap() {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
ExprOrSpread::Expr(expr) => expr,
};
return Ok(box Expr {
node: ExprKind::Paren(expr),
span: span!(start),
});
} else {
assert!(expr_or_spreads.len() >= 2);
let mut exprs = Vec::with_capacity(expr_or_spreads.len());
for expr in expr_or_spreads {
match expr {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
ExprOrSpread::Expr(expr) => exprs.push(expr),
}
}
assert!(exprs.len() >= 2);
// span of sequence expression should not include '(' and ')'
let seq_expr = box Expr {
span: Span {
start: exprs.first().unwrap().span.start,
end: exprs.last().unwrap().span.end,
},
node: ExprKind::Seq { exprs },
};
return Ok(box Expr {
span: span!(start),
node: ExprKind::Paren(seq_expr),
});
}
}
fn parse_tpl_lit(&mut self, tag: Option<Box<Expr>>) -> PResult<TplLit> {
assert_and_bump!('`');
let is_tagged = tag.is_some();
let mut exprs = vec![];
let cur_elem = self.parse_tpl_element(is_tagged)?;
let mut is_tail = cur_elem.tail;
let mut quasis = vec![cur_elem];
while !is_tail {
expect!("${");
exprs.push(self.include_in_expr(true).parse_expr()?);
expect!('}');
let elem = self.parse_tpl_element(is_tagged)?;
is_tail = elem.tail;
quasis.push(elem);
}
expect!('`');
Ok(TplLit { tag, exprs, quasis })
}
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<TplElement> {
let raw = match *cur!()? {
Template(_) => match bump!() {
Template(s) => s,
_ => unreachable!(),
},
_ => unexpected!(),
};
let tail = is!('`');
Ok(TplElement {
raw,
tail,
// FIXME
cooked: false,
})
}
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<Box<Expr>> {
loop {
obj = match self.parse_subscript(obj, no_call)? {
(expr, false) => return Ok(expr),
(expr, true) => ExprOrSuper::Expr(expr),
}
}
}
/// returned bool is true if this method should be called again.
fn parse_subscript(&mut self, obj: ExprOrSuper, no_call: bool) -> PResult<(Box<Expr>, bool)> {
let start = cur_pos!();
// member expression
// $obj.name
if eat!('.') {
let prop: Box<Expr> = self.parse_ident_name().map(From::from)?;
return Ok((
box Expr {
span: span!(start),
node: ExprKind::Member {
obj,
prop,
computed: false,
},
},
true,
));
}
// $obj[name()]
if eat!('[') {
let prop = self.include_in_expr(true).parse_expr()?;
expect!(']');
return Ok((
box Expr {
span: span!(start),
node: ExprKind::Member {
obj,
prop,
computed: true,
},
},
true,
));
}
if !no_call && is!('(') {
let args = self.parse_args()?;
return Ok((
box Expr {
span: span!(start),
node: ExprKind::Call { callee: obj, args },
},
true,
));
}
match obj {
ExprOrSuper::Expr(expr) => {
// MemberExpression[?Yield, ?Await] TemplateLiteral[?Yield, ?Await, +Tagged]
if is!('`') {
let tpl = self.parse_tpl_lit(Some(expr))?;
return Ok((
box Expr {
span: span!(start),
node: ExprKind::Tpl(tpl),
},
true,
));
}
Ok((expr, false))
}
ExprOrSuper::Super(..) => {
if no_call {
unexpected!()
}
unexpected!()
}
}
}
/// Parse call, dot, and `[]`-subscript expressions.
///
///
pub(super) fn parse_lhs_expr(&mut self) -> PResult<Box<Expr>> {
let start = cur_pos!();
// `super()` can't be handled from parse_new_expr()
if eat!("super") {
let obj = ExprOrSuper::Super(span!(start));
return self.parse_subscripts(obj, false);
}
let callee = self.parse_new_expr()?;
return_if_arrow!(callee);
match callee.node {
// If this is parsed using 'NewExpression' rule, just return it.
ExprKind::New { args: None, .. } => {
assert_ne!(
cur!(),
Some(&LParen),
"parse_new_expr() should eat paren if it exists"
);
return Ok(callee);
}
_ => {}
}
// 'CallExpr' rule contains 'MemberExpr (...)',
// and 'MemberExpr' rule contains 'new MemberExpr (...)'
if is!('(') {
// This is parsed using production MemberExpression,
// which is left-recursive.
let args = self.parse_args()?;
let call_expr = box Expr {
span: span!(start),
node: ExprKind::Call {
callee: ExprOrSuper::Expr(callee),
args,
},
};
return self.parse_subscripts(ExprOrSuper::Expr(call_expr), false);
}
// This is parsed using production 'NewExpression', which contains
// 'MemberExpression'
Ok(callee)
}
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<Box<Expr>> {
self.parse_expr()
}
pub(super) fn parse_args_or_pats(&mut self) -> PResult<Vec<ExprOrSpread>> {
self.parse_args()
}
}
/// simple leaf methods.
#[parser]
impl<I: Input> Parser<I> {
    /// Parse a `yield` / `yield*` expression. The caller must have
    /// verified that we are inside a generator.
    fn parse_yield_expr(&mut self) -> PResult<Box<Expr>> {
        spanned!({
            assert_and_bump!("yield");
            assert!(self.ctx.in_generator);

            //TODO
            // Spec says
            // YieldExpression cannot be used within the FormalParameters of a generator
            // function because any expressions that are part of FormalParameters are
            // evaluated before the resulting generator object is in a resumable state.

            if is!(';') || (!is!('*') && !cur!().map(Token::starts_expr).unwrap_or(true)) {
                // Bare `yield` with no argument.
                Ok(ExprKind::Yield {
                    arg: None,
                    delegate: false,
                })
            } else {
                // `yield expr` or delegating `yield* expr`.
                let has_star = eat!('*');
                let arg = self.parse_assignment_expr()?;

                Ok(ExprKind::Yield {
                    arg: Some(arg),
                    delegate: has_star,
                })
            }
        })
    }

    /// 12.2.5 Array Initializer
    fn parse_lit(&mut self) -> PResult<Lit> {
        let v = match *cur!()? {
            Word(Null) => {
                bump!();
                Lit::Null
            }
            Word(True) | Word(False) => {
                let v = is!("true");
                bump!();
                Lit::Bool(v)
            }
            Str(..) => match bump!() {
                //FIXME: the double-quote flag is discarded here.
                Str(s, _) => Lit::Str(s),
                _ => unreachable!(),
            },
            Num(..) => match bump!() {
                Num(num) => Lit::Num(num),
                _ => unreachable!(),
            },
            // Callers must check the token first (see parse_primary_expr).
            _ => unreachable!("parse_lit should not be called"),
        };
        Ok(v)
    }
}

View File

@ -0,0 +1,234 @@
//! Parser for unary operations and binary operations.
use super::*;
#[parser]
impl<I: Input> Parser<I> {
    /// Entry point for binary expressions.
    ///
    /// Name from spec: 'LogicalORExpression'
    pub(super) fn parse_bin_expr(&mut self) -> PResult<Box<Expr>> {
        let left = self.parse_unary_expr()?;
        return_if_arrow!(left);
        self.parse_bin_op_recursively(left, 0)
    }

    /// Parse binary operators with the operator precedence parsing
    /// algorithm. `left` is the left-hand side of the operator.
    /// `minPrec` provides context that allows the function to stop and
    /// defer further parser to one of its callers when it encounters an
    /// operator that has a lower precedence than the set it is parsing.
    fn parse_bin_op_recursively(&mut self, left: Box<Expr>, min_prec: u8) -> PResult<Box<Expr>> {
        let op = match {
            // Return left on eof
            match cur!() {
                Some(cur) => cur,
                None => return Ok(left),
            }
        } {
            // `in` counts as an operator only where the context allows
            // it (e.g. not inside a for-loop head).
            &Word(Keyword(In)) if self.ctx.include_in_expr => BinaryOp::In,
            &Word(Keyword(InstanceOf)) => BinaryOp::InstanceOf,
            &BinOp(op) => op.into(),
            _ => {
                // Current token is not a binary operator.
                return Ok(left);
            }
        };
        if op.precedence() <= min_prec {
            trace!(
                self.logger,
                "returning {:?} without parsing {:?} because min_prec={}, prec={}",
                left,
                op,
                min_prec,
                op.precedence()
            );
            return Ok(left);
        }
        bump!();
        trace!(
            self.logger,
            "parsing binary op {:?} min_prec={}, prec={}",
            op,
            min_prec,
            op.precedence()
        );
        match left.node {
            // This is invalid syntax.
            ExprKind::Unary { .. } if op == BinaryOp::Exp => {
                // Correct implementation would be returning Ok(left) and
                // returning "unexpected token '**'" on next.
                // But it's not useful error message.
                syntax_error!(SyntaxError::UnaryInExp)
            }
            _ => {}
        }
        let right = {
            let left_of_right = self.parse_unary_expr()?;
            self.parse_bin_op_recursively(
                left_of_right,
                if op == BinaryOp::Exp {
                    // exponential operator is right associative
                    op.precedence() - 1
                } else {
                    op.precedence()
                },
            )?
        };
        let node = box Expr {
            span: span!(left.span.start),
            node: ExprKind::Binary { op, left, right },
        };
        // Keep folding same-precedence operators into a left-leaning
        // tree (lower precedence is stopped by the check above).
        let expr = self.parse_bin_op_recursively(node, min_prec)?;
        Ok(expr)
    }

    /// Parse unary expression and update expression.
    ///
    /// spec: 'UnaryExpression'
    fn parse_unary_expr(&mut self) -> PResult<Box<Expr>> {
        let start = cur_pos!();
        // Parse update expression (prefix `++` / `--`).
        if is!("++") || is!("--") {
            let op = if bump!() == PlusPlus {
                UpdateOp::PlusPlus
            } else {
                UpdateOp::MinusMinus
            };
            let arg = self.parse_unary_expr()?;
            return Ok(box Expr {
                span: span!(start),
                node: ExprKind::Update {
                    prefix: true,
                    op,
                    arg,
                },
            });
        }
        // Parse unary expression
        if is_one_of!("delete", "void", "typeof", '+', '-', '~', '!') {
            let op = match bump!() {
                Word(Keyword(Delete)) => UnaryOp::Delete,
                Word(Keyword(Void)) => UnaryOp::Void,
                Word(Keyword(TypeOf)) => UnaryOp::TypeOf,
                BinOp(Add) => UnaryOp::Plus,
                BinOp(Sub) => UnaryOp::Minus,
                Tilde => UnaryOp::Tilde,
                Bang => UnaryOp::Bang,
                _ => unreachable!(),
            };
            let arg = self.parse_unary_expr()?;
            return Ok(box Expr {
                span: span!(start),
                node: ExprKind::Unary {
                    prefix: true,
                    op,
                    arg,
                },
            });
        }
        if self.ctx.in_async && is!("await") {
            return self.parse_await_expr();
        }
        // UpdateExpression
        let expr = self.parse_lhs_expr()?;
        return_if_arrow!(expr);
        // Line terminator isn't allowed here.
        if self.input.had_line_break_before_cur() {
            return Ok(expr);
        }
        if is_one_of!("++", "--") {
            // NOTE(review): this shadows `start` with the position of
            // the postfix operator token, so the resulting span excludes
            // the operand — confirm this is intended.
            let start = cur_pos!();
            let op = if bump!() == PlusPlus {
                UpdateOp::PlusPlus
            } else {
                UpdateOp::MinusMinus
            };
            return Ok(box Expr {
                span: span!(start),
                node: ExprKind::Update {
                    prefix: false,
                    op,
                    arg: expr,
                },
            });
        }
        Ok(expr)
    }

    /// spec: 'AwaitExpression'.
    ///
    /// Only reachable inside an async context (asserted below);
    /// `await *` is a syntax error.
    fn parse_await_expr(&mut self) -> PResult<Box<Expr>> {
        spanned!({
            assert_and_bump!("await");
            assert!(self.ctx.in_async);
            if is!('*') {
                syntax_error!(SyntaxError::AwaitStar)
            }
            let arg = self.parse_unary_expr()?;
            Ok(ExprKind::Await { arg })
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lexer::Lexer;

    /// Builds a module-mode parser over `s` for tests.
    fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
        let logger = ::testing::logger().new(o!("src" => s));
        Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
    }

    /// Parses `s` as a binary expression, panicking with context on
    /// failure.
    fn bin(s: &'static str) -> Box<Expr> {
        let expr = mk(s).parse_bin_expr();
        expr.unwrap_or_else(|err| {
            panic!("failed to parse '{}' as a binary expression: {:?}", s, err)
        })
    }

    // `*` must bind tighter than `+`.
    #[test]
    fn simple() {
        assert_eq_ignore_span!(
            bin("5 + 4 * 7"),
            box Expr {
                span: Default::default(),
                node: ExprKind::Binary {
                    op: BinaryOp::Add,
                    left: bin("5"),
                    right: bin("4 * 7"),
                },
            }
        );
    }

    // Operators of equal precedence must associate to the left.
    #[test]
    fn same_prec() {
        assert_eq_ignore_span!(
            bin("5 + 4 + 7"),
            box Expr {
                span: Default::default(),
                node: ExprKind::Binary {
                    op: BinaryOp::Add,
                    left: bin("5 + 4"),
                    right: bin("7"),
                },
            }
        );
    }
}

View File

@ -0,0 +1,184 @@
use super::*;
use lexer::Lexer;
/// Builds a module-mode parser over `s` for tests.
fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
    let logger = ::testing::logger().new(o!("src" => s));
    Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
}
/// Parses `s` as a left-hand-side expression, panicking on failure.
fn lhs(s: &'static str) -> Box<Expr> {
    mk(s)
        .parse_lhs_expr()
        .expect("failed to parse lhs expression")
}
/// Parses `s` as a full expression, panicking on failure.
fn expr(s: &'static str) -> Box<Expr> {
    mk(s).parse_expr().expect("failed to parse an expression")
}
// Shorthand dummy span for struct literals in the tests below;
// lowercase so `span,` field-init shorthand works.
#[allow(non_upper_case_globals)]
const span: Span = Span::DUMMY;
// `new Date().toString()`: the `.toString()` member access must apply
// to the result of `new Date()`, not be swallowed into the callee.
#[test]
fn new_expr_should_not_eat_too_much() {
    assert_eq_ignore_span!(
        mk("new Date().toString()").parse_new_expr().unwrap(),
        box Expr {
            span: Default::default(),
            node: ExprKind::Member {
                obj: mk("new Date()")
                    .parse_member_expr()
                    .map(ExprOrSuper::Expr)
                    .unwrap(),
                prop: Ident {
                    sym: "toString".into(),
                    span: Default::default(),
                }.into(),
                computed: false,
            },
        }
    );
}
// `new Date.toString()`: the member expression is the callee of `new`.
#[test]
fn lhs_expr_as_new_expr_prod() {
    assert_eq_ignore_span!(
        lhs("new Date.toString()"),
        box Expr {
            span: Default::default(),
            node: ExprKind::New {
                callee: lhs("Date.toString"),
                args: Some(vec![]),
            },
        }
    );
}
// `new Date.toString()()`: a trailing call on a NewExpression parses as
// a Call whose callee is the whole `new` expression.
#[test]
fn lhs_expr_as_call() {
    assert_eq_ignore_span!(
        lhs("new Date.toString()()"),
        box Expr {
            span: Default::default(),
            node: ExprKind::Call {
                callee: ExprOrSuper::Expr(lhs("new Date.toString()")),
                args: vec![],
            },
        }
    )
}
// Arrow function with an empty parameter list.
#[test]
fn arrow_fn_no_args() {
    assert_eq_ignore_span!(
        expr("() => 1"),
        box Expr {
            span,
            node: ExprKind::Arrow {
                is_async: false,
                is_generator: false,
                params: vec![],
                body: BlockStmtOrExpr::Expr(expr("1")),
            },
        }
    );
}
// Arrow function with a single parenthesized identifier parameter.
#[test]
fn arrow_fn() {
    assert_eq_ignore_span!(
        expr("(a) => 1"),
        box Expr {
            span,
            node: ExprKind::Arrow {
                is_async: false,
                is_generator: false,
                params: vec![
                    Pat {
                        span,
                        node: PatKind::Ident(Ident {
                            span,
                            sym: "a".into(),
                        }),
                    },
                ],
                body: BlockStmtOrExpr::Expr(expr("1")),
            },
        }
    );
}
// Arrow function whose only parameter is a rest pattern.
#[test]
fn arrow_fn_rest() {
    assert_eq_ignore_span!(
        expr("(...a) => 1"),
        box Expr {
            span,
            node: ExprKind::Arrow {
                is_async: false,
                is_generator: false,
                params: vec![
                    Pat {
                        span,
                        node: PatKind::Rest(box Pat {
                            span,
                            node: PatKind::Ident(Ident {
                                span,
                                sym: "a".into(),
                            }),
                        }),
                    },
                ],
                body: BlockStmtOrExpr::Expr(expr("1")),
            },
        }
    );
}
// Arrow function with a bare (unparenthesized) identifier parameter.
#[test]
fn arrow_fn_no_paren() {
    assert_eq_ignore_span!(
        expr("a => 1"),
        box Expr {
            span,
            node: ExprKind::Arrow {
                is_async: false,
                is_generator: false,
                params: vec![
                    Pat {
                        span,
                        node: PatKind::Ident(Ident {
                            span,
                            sym: "a".into(),
                        }),
                    },
                ],
                body: BlockStmtOrExpr::Expr(expr("1")),
            },
        }
    );
}
// `new a` without an argument list parses with `args: None`.
#[test]
fn new_no_paren() {
    assert_eq_ignore_span!(
        expr("new a"),
        box Expr {
            span,
            node: ExprKind::New {
                callee: expr("a"),
                args: None,
            },
        }
    );
}
// Nested `new new a` without argument lists.
#[test]
fn new_new_no_paren() {
    assert_eq_ignore_span!(
        expr("new new a"),
        box Expr {
            span,
            node: ExprKind::New {
                callee: expr("new a"),
                args: None,
            },
        }
    );
}

View File

@ -0,0 +1,106 @@
//! 12.1 Identifiers
use super::*;
#[parser]
impl<I: Input> Parser<I> {
    /// IdentifierReference
    pub(super) fn parse_ident_ref(&mut self) -> PResult<Ident> {
        let ctx = self.ctx;
        // `yield` / `await` are allowed as identifiers only outside
        // generator / async contexts respectively.
        self.parse_ident(!ctx.in_generator, !ctx.in_async)
    }

    /// LabelIdentifier
    pub(super) fn parse_label_ident(&mut self) -> PResult<Ident> {
        let ctx = self.ctx;
        self.parse_ident(!ctx.in_generator, !ctx.in_async)
    }

    /// Use this when spec says "IdentifierName".
    /// This allows idents like `catch`.
    pub(super) fn parse_ident_name(&mut self) -> PResult<Ident> {
        spanned!({
            let w = match cur!() {
                Some(&Word(..)) => match bump!() {
                    Word(w) => w,
                    _ => unreachable!(),
                },
                _ => syntax_error!(SyntaxError::ExpectedIdent),
            };
            Ok(w.into())
        })
    }

    /// Identifier
    ///
    /// In strict mode, "yield" is SyntaxError if matched.
    ///
    /// `incl_yield` / `incl_await` control whether those keywords may be
    /// used as identifiers in the current context.
    pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<Ident> {
        spanned!({
            let strict = self.ctx.strict;
            let w = match cur!() {
                Some(&Word(..)) => match bump!() {
                    Word(w) => w,
                    _ => unreachable!(),
                },
                _ => syntax_error!(SyntaxError::ExpectedIdent),
            };
            // Spec:
            // It is a Syntax Error if this phrase is contained in strict mode code and the
            // StringValue of IdentifierName is: "implements", "interface", "let",
            // "package", "private", "protected", "public", "static", or "yield".
            if strict {
                match w {
                    Keyword(Yield)
                    | Ident(js_word!("static"))
                    | Ident(js_word!("implements"))
                    | Ident(js_word!("interface"))
                    | Ident(js_word!("let"))
                    | Ident(js_word!("package"))
                    | Ident(js_word!("private"))
                    | Ident(js_word!("protected"))
                    | Ident(js_word!("public")) => syntax_error!(SyntaxError::InvalidIdentInStrict),
                    _ => {}
                }
            }
            //TODO
            // Spec:
            // It is a Syntax Error if the goal symbol of the syntactic grammar is Module
            // and the StringValue of IdentifierName is "await".
            //TODO
            // Spec:
            // It is a Syntax Error if StringValue of IdentifierName is the same String
            // value as the StringValue of any ReservedWord except for yield or await.
            match w {
                Keyword(Let) => Ok(w.into()),
                Ident(ident) => Ok(ident),
                Keyword(Yield) if incl_yield => Ok(js_word!("yield")),
                Keyword(Await) if incl_await => Ok(js_word!("await")),
                Keyword(..) | Null | True | False => {
                    // NOTE(review): debug print left in — consider
                    // removing or downgrading to trace logging.
                    println!("Word: {:?}", w);
                    syntax_error!(SyntaxError::ExpectedIdent)
                }
            }
        })
    }
}
/// Abstracts over binding-identifier positions where the identifier may
/// be required (`Ident`) or optional (`Option<Ident>`); see the two
/// impls below.
pub(super) trait MaybeOptionalIdentParser<Ident> {
    fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident>;
}
/// Required-identifier case: delegates to `parse_binding_ident`.
impl<I: Input> MaybeOptionalIdentParser<Ident> for Parser<I> {
    fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident> {
        self.parse_binding_ident()
    }
}
/// Optional-identifier case: delegates to `parse_opt_binding_ident`.
impl<I: Input> MaybeOptionalIdentParser<Option<Ident>> for Parser<I> {
    fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
        self.parse_opt_binding_ident()
    }
}

View File

@ -0,0 +1,162 @@
//! Note: this module requires `#![feature(nll)]`.
use swc_common::{BytePos, Span};
use token::*;
/// Input for parser.
/// Input for parser.
///
/// Implemented by the lexer: a stream of `TokenAndSpan` that can also
/// report whether a line break preceded the last token it returned.
pub trait Input: Iterator<Item = TokenAndSpan> {
    fn had_line_break_before_last(&self) -> bool;
}
/// This struct is responsible for managing current token and peeked
/// token.
pub(super) struct ParserInput<I: Input> {
    iter: ItemIter<I>,
    // Current token; lazily filled by `cur()` / `bump_inner()`.
    cur: Option<Item>,
    /// Last of previous span
    last_pos: BytePos,
    /// Peeked token
    next: Option<Item>,
}
/// One token, together with the metadata the parser needs alongside it.
#[derive(Debug)]
struct Item {
    token: Token,
    /// Had a line break before this token?
    had_line_break: bool,
    span: Span,
}
struct ItemIter<I: Input>(I);
impl<I: Input> ItemIter<I> {
    /// Pulls the next token from the underlying lexer, recording
    /// whether a line break preceded it.
    fn next(&mut self) -> Option<Item> {
        self.0
            .next()
            .map(|TokenAndSpan { token, span }| Item {
                token,
                span,
                had_line_break: self.0.had_line_break_before_last(),
            })
    }
}
impl<I: Input> ParserInput<I> {
    /// Wraps a lexer; no token is read until `cur()` is first called.
    pub const fn new(lexer: I) -> Self {
        ParserInput {
            iter: ItemIter(lexer),
            cur: None,
            last_pos: BytePos(0),
            next: None,
        }
    }

    /// Advances to the next token and returns the token that was
    /// current before the call (`None` if there was none).
    fn bump_inner(&mut self) -> Option<Token> {
        let prev = self.cur.take();
        self.last_pos = prev.as_ref()
            .map(|item| item.span.end)
            .unwrap_or(BytePos(0));
        // If we have peeked a token, take it instead of calling lexer.next()
        self.cur = self.next.take().or_else(|| self.iter.next());
        prev.map(|it| it.token)
    }

    /// Read-only view of the current token, for diagnostics.
    pub fn cur_debug(&self) -> Option<&Token> {
        self.cur.as_ref().map(|it| &it.token)
    }

    /// Returns current token.
    ///
    /// # Panics
    /// Panics if there is no current token; callers must check first.
    pub fn bump(&mut self) -> Token {
        self.bump_inner().expect(
            "Current token is `None`. Parser should not call bump()\
             without knowing current token",
        )
    }

    /// True if a current token has already been read.
    pub fn knows_cur(&self) -> bool {
        self.cur.is_some()
    }

    /// Looks one token past the current one without consuming anything.
    ///
    /// # Panics
    /// Panics if there is no current token.
    pub fn peek(&mut self) -> Option<&Token> {
        assert!(
            self.cur.is_some(),
            "parser should not call peek() without knowing current token"
        );
        if self.next.is_none() {
            self.next = self.iter.next();
        }
        self.next.as_ref().map(|ts| &ts.token)
    }

    /// This returns true on eof.
    pub fn had_line_break_before_cur(&self) -> bool {
        self.cur
            .as_ref()
            .map(|it| it.had_line_break)
            .unwrap_or(true)
    }

    /// This returns true on eof.
    pub fn has_linebreak_between_cur_and_peeked(&mut self) -> bool {
        let _ = self.peek();
        self.next
            .as_ref()
            .map(|item| item.had_line_break)
            .unwrap_or({
                // return true on eof.
                true
            })
    }

    /// Get current token. Returns `None` only on eof.
    pub fn cur(&mut self) -> Option<&Token> {
        if self.cur.is_none() {
            self.bump_inner();
        }
        self.cur.as_ref().map(|item| &item.token)
    }

    /// True if the current token equals `expected`.
    pub fn is(&mut self, expected: &Token) -> bool {
        match self.cur() {
            Some(t) => *expected == *t,
            _ => false,
        }
    }

    /// True if the peeked token equals `expected`.
    pub fn peeked_is(&mut self, expected: &Token) -> bool {
        match self.peek() {
            Some(t) => *expected == *t,
            _ => false,
        }
    }

    /// Consumes the current token if it equals `expected`; returns
    /// whether it was consumed.
    pub fn eat(&mut self, expected: &Token) -> bool {
        match self.cur() {
            Some(t) => {
                if *expected == *t {
                    self.bump();
                    true
                } else {
                    false
                }
            }
            _ => false,
        }
    }

    /// Convenience form of `eat` for keyword tokens.
    pub fn eat_keyword(&mut self, kwd: Keyword) -> bool {
        self.eat(&Word(Keyword(kwd)))
    }

    /// Returns start of current token.
    pub fn cur_pos(&self) -> BytePos {
        self.cur
            .as_ref()
            .map(|item| item.span.start)
            .unwrap_or(BytePos(0))
    }

    /// Returns last of previous token.
    pub const fn last_pos(&self) -> BytePos {
        self.last_pos
    }
}

View File

@ -0,0 +1,191 @@
/// Reports an unexpected token at the current position.
/// NOTE(review): currently panics via `unimplemented!` instead of
/// returning a syntax error.
macro_rules! unexpected {
    ($p:expr) => {{
        let pos = cur_pos!($p);
        let cur = cur!($p);
        unimplemented!("unexpected token: {:?} at {:?}", cur, pos);
    }};
}
/// Builds an `Error::Syntax` carrying the current token/position plus
/// the parser-source `file!()`/`line!()` that raised it, then
/// propagates it with `?`. Diverges (never returns normally).
macro_rules! syntax_error {
    ($p:expr, $s:expr) => {{
        let err = Error::Syntax($p.input.cur().cloned(), cur_pos!($p), $s, file!(), line!());
        error!($p.logger, "failed to parse: {:?}", err);
        let res: PResult<!> = Err(err);
        res?
    }};
}
/// Tests the current token without consuming it. Returns bool.
///
/// The `';'` arm handles automatic semicolon insertion: eof, `}` and a
/// preceding line break all count as a (virtual) semicolon.
macro_rules! is {
    ($p:expr, BindingIdent) => {{
        match cur!($p) {
            // TODO: Exclude some keywords
            Some(&Word(ref w)) => !w.is_reserved_word($p.ctx.strict),
            _ => false,
        }
    }};

    ($p:expr, IdentRef) => {{
        match cur!($p) {
            // TODO: Exclude some keywords
            Some(&Word(ref w)) => !w.is_reserved_word($p.ctx.strict),
            _ => false,
        }
    }};

    ($p:expr, IdentName) => {{
        match cur!($p) {
            Some(&Word(..)) => true,
            _ => false,
        }
    }};

    ($p:expr, ';') => {{
        $p.input.is(&Token::Semi) || cur!($p) == None || is!($p, '}')
            || $p.input.had_line_break_before_cur()
    }};

    ($p:expr, $t:tt) => {
        $p.input.is(&tok!($t))
    };
}
/// Returns true on eof.
macro_rules! eof {
    ($p:expr) => {
        cur!($p) == None
    };
}
/// Tests the token after the current one without consuming anything.
macro_rules! peeked_is {
    ($p:expr, $t:tt) => {
        $p.input.peeked_is(&tok!($t))
    };
}
/// True if the current token matches any of the given tokens
/// (short-circuiting `is!` over each argument).
macro_rules! is_one_of {
    ($p:expr, $($t:tt),+) => {{
        false
        $(
            || is!($p, $t)
        )*
    }};
}
// This will panic if current != token
macro_rules! assert_and_bump {
    ($p:expr, $t:tt) => {{
        const TOKEN: &Token = &tok!($t);
        if !$p.input.is(TOKEN) {
            unreachable!("assertion failed: expected {:?}, got {:?}", TOKEN, $p.input.cur());
        }
        bump!($p);
    }};
}
/// Consumes the current token if it matches; returns whether it did.
///
/// The `';'` arm handles automatic semicolon insertion: eof, `}` and a
/// preceding line break count as a semicolon without consuming
/// anything.
macro_rules! eat {
    ($p:expr, ';') => {{
        debug!($p.logger, "eat(';'): cur={:?}", cur!($p));
        $p.input.eat(&Token::Semi) || cur!($p) == None || is!($p, '}')
            || $p.input.had_line_break_before_cur()
    }};

    ($p:expr, $t:tt) => {{
        // NOTE(review): TOKEN is unused in this arm.
        const TOKEN: &Token = &tok!($t);
        if is!($p, $t) {
            bump!($p);
            true
        } else {
            false
        }
    }};
}
/// Like `eat!`, but without any automatic-semicolon-insertion handling:
/// the exact token (including `;`) must be present.
macro_rules! eat_exact {
    ($p:expr, $t:tt) => {{
        const TOKEN: &Token = &token_including_semi!($t);
        if $p.input.is(TOKEN) {
            bump!($p);
            true
        } else {
            false
        }
    }};
}
/// Consumes the expected token or raises a syntax error.
/// Goes through `eat!`, so `;` benefits from automatic semicolon
/// insertion.
macro_rules! expect {
    ($p:expr, $t:tt) => {{
        const TOKEN: &Token = &token_including_semi!($t);
        if !eat!($p, $t) {
            syntax_error!($p, SyntaxError::Expected(TOKEN))
        }
    }};
}
/// Like `expect!`, but without automatic semicolon insertion.
macro_rules! expect_exact {
    ($p:expr, $t:tt) => {{
        const TOKEN: &Token = &token_including_semi!($t);
        if !eat_exact!($p, $t) {
            syntax_error!($p, SyntaxError::Expected(TOKEN))
        }
    }};
}
/// Current token; `None` only on eof.
macro_rules! cur {
    ($parser:expr) => {
        $parser.input.cur()
    };
}
/// Token after the current one. Panics if the current token has not
/// been read yet.
macro_rules! peek {
    ($p:expr) => {{
        assert!(
            $p.input.knows_cur(),
            "parser should not call peek() without knowing current token.
Current token is {:?}", cur!($p)
        );
        $p.input.peek()
    }};
}
/// Consumes and returns the current token. Panics if the current token
/// has not been read yet.
macro_rules! bump {
    ($p:expr) => {{
        assert!(
            $p.input.knows_cur(),
            "parser should not call bump() without knowing current token"
        );
        $p.input.bump()
    }};
}
/// Start position of the current token.
macro_rules! cur_pos {
    ($p:expr) => { $p.input.cur_pos() }
}
/// End position of the previous token.
macro_rules! last_pos {
    ($p:expr) => { $p.input.last_pos()};
}
/// Early-returns `$expr` from the enclosing function if it is the arrow
/// function whose start was recorded in
/// `state.potential_arrow_start`.
macro_rules! return_if_arrow {
    ($p:expr, $expr:expr) => {{
        let is_cur = match $p.state.potential_arrow_start {
            Some(start) => $expr.span.start == start,
            None => false
        };
        if is_cur {
            match $expr.node {
                ExprKind::Arrow{..} => return Ok($expr),
                _ => {},
            }
        }
    }};
}

View File

@ -0,0 +1,110 @@
#![allow(dead_code, unused_variables)]
#![deny(non_snake_case)]
pub use self::input::Input;
use self::input::ParserInput;
use self::util::ParseObject;
use ast::*;
use error::SyntaxError;
use parser_macros::parser;
use slog::Logger;
use std::ops::{Deref, DerefMut};
use std::option::NoneError;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use token::*;
#[macro_use]
mod macros;
mod class_and_fn;
mod object;
mod expr;
mod ident;
mod stmt;
mod pat;
pub mod input;
mod util;
pub type PResult<T> = Result<T, Error>;
/// Parser error.
#[derive(Debug)]
pub enum Error {
    Eof,
    /// (current token, position, error kind, parser source file, parser
    /// source line) — the last two record where in the parser the error
    /// was raised (see the `syntax_error!` macro).
    Syntax(Option<Token>, BytePos, SyntaxError, &'static str, u32),
}
/// Lets `?` on `Option` values (via the unstable
/// `std::option::NoneError`) surface as `Error::Eof`.
impl From<NoneError> for Error {
    fn from(_: NoneError) -> Self {
        Error::Eof
    }
}
/// Parser configuration. Currently empty; reserved for future options.
#[derive(Debug, Clone, Copy, Default)]
pub struct Config {}
/// EcmaScript parser.
pub struct Parser<I: Input> {
    logger: Logger,
    cfg: Config,
    // Copyable syntactic context (strictness, async/generator flags).
    ctx: Context,
    // Mutable parsing state (labels, arrow-function bookkeeping).
    state: State,
    input: ParserInput<I>,
}
/// Copyable syntactic context threaded through parsing.
#[derive(Debug, Clone, Copy, Default)]
struct Context {
    strict: bool,
    /// If true, `in` is parsed as a binary operator (it is excluded in
    /// for-loop heads).
    include_in_expr: bool,
    /// If true, await expression is parsed, and "await" is treated as a
    /// keyword.
    in_async: bool,
    /// If true, yield expression is parsed, and "yield" is treated as a
    /// keyword.
    in_generator: bool,
    in_module: bool,
}
/// Mutable parser state, as opposed to the copyable `Context`.
#[derive(Debug, Default)]
struct State {
    // Labels currently in scope.
    labels: Vec<JsWord>,
    /// Start position of an assignment expression.
    potential_arrow_start: Option<BytePos>,
}
impl<I: Input> Parser<I> {
pub fn new_for_module(logger: Logger, lexer: I) -> Self {
Parser {
logger,
input: ParserInput::new(lexer),
ctx: Context {
strict: true,
in_module: true,
..Default::default()
},
cfg: Default::default(),
state: Default::default(),
}
}
pub fn new_for_script(logger: Logger, lexer: I, strict: bool) -> Self {
Parser {
logger,
input: ParserInput::new(lexer),
ctx: Context {
strict,
..Default::default()
},
cfg: Default::default(),
state: Default::default(),
}
}
#[parser]
pub fn parse_script(&mut self) -> PResult<Vec<Stmt>> {
self.parse_block_body(true, None)
}
#[parser]
pub fn parse_module(&mut self) -> PResult<Module> {
self.parse_block_body(true, None)
.map(|body| Module { body })
}
}

View File

@ -0,0 +1,237 @@
//! Parser for object literal.
use super::*;
#[parser]
impl<I: Input> Parser<I> {
    /// Parse a object literal or object pattern.
    ///
    /// The result type (`Box<Expr>` vs `Pat`) is selected through the
    /// `ParseObject` implementations below.
    pub(super) fn parse_object<T>(&mut self) -> PResult<T>
    where
        Self: ParseObject<T>,
    {
        let start = cur_pos!();
        assert_and_bump!('{');
        let mut props = vec![];
        let mut first = true;
        while !eat!('}') {
            // Handle comma
            if first {
                first = false;
            } else {
                expect!(',');
                // A trailing comma right before `}` is allowed.
                if eat!('}') {
                    break;
                }
            }
            let prop = self.parse_object_prop()?;
            props.push(prop);
        }
        Ok(Self::make_object(span!(start), props))
    }

    /// spec: 'PropertyName'
    ///
    /// Accepts string, number, identifier-name and computed (`[expr]`)
    /// property names.
    pub(super) fn parse_prop_name(&mut self) -> PResult<PropName> {
        let start = cur_pos!();
        let v = match *cur!()? {
            Str(_, _) => match bump!() {
                Str(s, _) => PropName::Str(s),
                _ => unreachable!(),
            },
            Num(_) => match bump!() {
                Num(n) => PropName::Num(n),
                _ => unreachable!(),
            },
            Word(..) => match bump!() {
                Word(w) => PropName::Ident(Ident {
                    sym: w.into(),
                    span: span!(start),
                }),
                _ => unreachable!(),
            },
            LBracket => {
                bump!();
                let expr = self.include_in_expr(true)
                    .parse_assignment_expr()
                    .map(PropName::Computed)?;
                expect!(']');
                expr
            }
            _ => unexpected!(),
        };
        Ok(v)
    }
}
// Object-literal (expression) flavor of `parse_object`.
#[parser]
impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
    type Prop = Prop;

    fn make_object(span: Span, props: Vec<Self::Prop>) -> Box<Expr> {
        box Expr {
            span,
            node: ExprKind::Object { props },
        }
    }

    /// spec: 'PropertyDefinition'
    ///
    /// Handles, in order: generator methods (`*a(){}`), key/value
    /// pairs, plain methods, shorthand (+ default) properties, and the
    /// `get`/`set`/`async` accessor/method forms.
    fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
        let start = cur_pos!();
        // Parse as 'MethodDefinition'
        if eat!('*') {
            let name = self.parse_prop_name()?;
            return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, false, true)
                .map(|function| Prop {
                    span: span!(start),
                    node: PropKind::Method {
                        key: name,
                        function,
                    },
                });
        }
        let key = self.parse_prop_name()?;
        //
        // {[computed()]: a,}
        // { 'a': a, }
        // { 0: 1, }
        // { a: expr, }
        if eat!(':') {
            let value = self.include_in_expr(true).parse_assignment_expr()?;
            return Ok(Prop {
                span: Span {
                    start,
                    end: value.span.end,
                },
                node: PropKind::KeyValue { key, value },
            });
        }
        // Handle `a(){}` (and async(){} / get(){} / set(){})
        if is!('(') {
            return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, false, false)
                .map(|function| Prop {
                    span: span!(start),
                    node: PropKind::Method { key, function },
                });
        }
        let mut ident = match key {
            PropName::Ident(ident) => ident,
            _ => unexpected!(),
        };
        // `ident` from parse_prop_name is parsed as 'IdentifierName'
        // It means we should check for invalid expressions like { for, }
        if is_one_of!('=', ',', '}') {
            let is_reserved_word = {
                // FIXME: Use extension trait instead of this.
                let word = Word::from(ident.sym);
                let r = word.is_reserved_word(self.ctx.strict);
                ident = Ident {
                    sym: word.into(),
                    ..ident
                };
                r
            };
            if is_reserved_word {
                syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat)
            }
            // Shorthand with default: `{ a = expr }` (only valid when
            // later reparsed as a pattern).
            if eat!('=') {
                let value = self.include_in_expr(true).parse_assignment_expr()?;
                return Ok(Prop {
                    span: span!(start),
                    node: PropKind::Assign { key: ident, value },
                });
            }
            return Ok(Prop::new_shorthand(ident));
        }
        // get a(){}
        // set a(v){}
        // async a(){}
        match ident.sym {
            js_word!("get") | js_word!("set") | js_word!("async") => {
                let key = self.parse_prop_name()?;
                return match ident.sym {
                    // Getter: zero parameters.
                    js_word!("get") => self.parse_fn_args_body(start, |_| Ok(vec![]), false, false)
                        .map(|Function { body, .. }| Prop {
                            span: span!(start),
                            node: PropKind::Getter { key, body },
                        }),
                    // Setter: exactly one parameter.
                    js_word!("set") => self.parse_fn_args_body(
                        start,
                        |p| p.parse_formal_param().map(|pat| vec![pat]),
                        false,
                        false,
                    ).map(|Function { params, body, .. }| {
                        assert_eq!(params.len(), 1);
                        Prop {
                            span: span!(start),
                            node: PropKind::Setter {
                                key,
                                body,
                                param: params.into_iter().next().unwrap(),
                            },
                        }
                    }),
                    js_word!("async") => self.parse_fn_args_body(
                        start,
                        Parser::parse_unique_formal_params,
                        true,
                        false,
                    ).map(|function| Prop {
                        span: span!(start),
                        node: PropKind::Method { key, function },
                    }),
                    _ => unreachable!(),
                };
            }
            _ => unexpected!(),
        }
    }
}
// Object-pattern (binding) flavor of `parse_object`.
#[parser]
impl<I: Input> ParseObject<Pat> for Parser<I> {
    type Prop = ObjectPatProp;

    fn make_object(span: Span, props: Vec<Self::Prop>) -> Pat {
        Pat {
            span,
            node: PatKind::Object { props },
        }
    }

    /// Production 'BindingProperty'
    ///
    /// `{ key: element }`, shorthand `{ key }` or shorthand with a
    /// default `{ key = expr }`.
    fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
        let key = self.parse_prop_name()?;
        if eat!(':') {
            let value = box self.parse_binding_element()?;
            return Ok(ObjectPatProp::KeyValue { key, value });
        }
        // Shorthand form requires a plain identifier key.
        let key = match key {
            PropName::Ident(ident) => ident,
            _ => unexpected!(),
        };
        let value = if eat!('=') {
            self.include_in_expr(true)
                .parse_assignment_expr()
                .map(Some)?
        } else {
            None
        };
        Ok(ObjectPatProp::Assign { key, value })
    }
}

View File

@ -0,0 +1,306 @@
//! 13.3.3 Destructuring Binding Patterns
use super::*;
use std::iter;
#[parser]
impl<I: Input> Parser<I> {
    /// Parses a binding identifier if one is present, `None` otherwise.
    pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
        if is!(BindingIdent) {
            self.parse_binding_ident().map(Some)
        } else {
            Ok(None)
        }
    }

    /// babel: `parseBindingIdentifier`
    ///
    /// spec: `BindingIdentifier`
    pub(super) fn parse_binding_ident(&mut self) -> PResult<Ident> {
        // "yield" and "await" is **lexically** accepted.
        let ident = self.parse_ident(true, true)?;
        // Strict mode forbids binding `eval` / `arguments`.
        if self.ctx.strict {
            if &*ident.sym == "arguments" || &*ident.sym == "eval" {
                syntax_error!(SyntaxError::EvalAndArgumentsInStrict)
            }
        }
        Ok(ident)
    }

    /// Dispatches on the current token: identifier, array pattern,
    /// object pattern, or a parenthesized pattern.
    pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<Pat> {
        match *cur!()? {
            tok!("yield") | Word(..) => self.parse_binding_ident().map(Pat::from),
            tok!('[') => self.parse_array_binding_pat(),
            tok!('{') => self.parse_object(),
            tok!('(') => {
                bump!();
                let pat = self.parse_binding_pat_or_ident()?;
                expect!(')');
                Ok(pat)
            }
            _ => unexpected!(),
        }
    }

    /// babel: `parseBindingAtom`
    ///
    /// A binding pattern, optionally followed by a `= default` initializer.
    pub(super) fn parse_binding_element(&mut self) -> PResult<Pat> {
        let start = cur_pos!();
        let left = self.parse_binding_pat_or_ident()?;
        if eat!('=') {
            let right = self.include_in_expr(true).parse_assignment_expr()?;
            return Ok(Pat {
                span: span!(start),
                node: PatKind::Assign {
                    left: box left,
                    right,
                },
            });
        }
        Ok(left)
    }

    /// `[a, , b, ...rest]` — elisions become `None` elements; a rest
    /// element terminates the list.
    fn parse_array_binding_pat(&mut self) -> PResult<Pat> {
        spanned!({
            assert_and_bump!('[');
            let mut elems = vec![];
            // Number of consecutive elisions (`,`) seen since the last
            // element.
            let mut comma = 0;
            while !eof!() && !is!(']') {
                if eat!(',') {
                    comma += 1;
                    continue;
                }
                elems.extend(iter::repeat(None).take(comma));
                comma = 0;
                let start = cur_pos!();
                if eat!("...") {
                    let pat = self.parse_binding_pat_or_ident()?;
                    let pat = Pat {
                        span: span!(start),
                        node: PatKind::Rest(box pat),
                    };
                    elems.push(Some(pat));
                    break;
                } else {
                    elems.push(self.parse_binding_element().map(Some)?);
                }
            }
            expect!(']');
            Ok(PatKind::Array(elems))
        })
    }

    /// spec: 'FormalParameter'
    pub(super) fn parse_formal_param(&mut self) -> PResult<Pat> {
        self.parse_binding_element()
    }

    ///
    /// spec: 'FormalParameterList'
    pub(super) fn parse_formal_params(&mut self) -> PResult<Vec<Pat>> {
        let mut first = true;
        let mut params = vec![];
        while !eof!() && !is!(')') {
            if first {
                first = false;
            } else {
                expect!(',');
                // Handle trailing comma.
                if is!(')') {
                    break;
                }
            }
            let start = cur_pos!();
            let rest = eat!("...");
            if rest {
                // A rest parameter must be last.
                let pat = self.parse_binding_pat_or_ident()?;
                let pat = Pat {
                    span: span!(start),
                    node: PatKind::Rest(box pat),
                };
                params.push(pat);
                break;
            } else {
                params.push(self.parse_binding_element()?);
            }
        }
        Ok(params)
    }

    pub(super) fn parse_unique_formal_params(&mut self) -> PResult<Vec<Pat>> {
        // FIXME: This is wrong. (Uniqueness of parameter names is not
        // actually enforced yet.)
        self.parse_formal_params()
    }
}
#[parser]
impl<I: Input> Parser<I> {
    /// Reinterprets an already-parsed expression as a binding pattern
    /// (used e.g. when `(a, b) => ...` turns out to be an arrow
    /// function).
    ///
    /// This does not return 'rest' pattern because non-last parameter cannot be
    /// rest.
    pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<Pat> {
        let span = expr.span;
        match expr.node {
            // Member/call/new expressions are kept as expression
            // patterns (valid e.g. as assignment targets).
            ExprKind::Member { .. } | ExprKind::Call { .. } | ExprKind::New { .. } => {
                return Ok(Pat {
                    span,
                    node: PatKind::Expr(box expr),
                })
            }

            ExprKind::Paren(inner) => {
                // FIXME: Check if this is correct?
                let inner_pat = self.reparse_expr_as_pat(inner)?;
                return Ok(Pat {
                    span,
                    node: inner_pat.node,
                });
            }

            // `a = b` becomes an assignment (default) pattern.
            ExprKind::Assign {
                left,
                op: Assign,
                right,
            } => {
                return Ok(Pat {
                    span,
                    node: PatKind::Assign {
                        left: match left {
                            PatOrExpr::Expr(left) => box self.reparse_expr_as_pat(left)?,
                            PatOrExpr::Pat(left) => box left,
                        },
                        right,
                    },
                })
            }

            ExprKind::Object { props } => {
                // {}
                return Ok(Pat {
                    span,
                    node: PatKind::Object {
                        props: props
                            .into_iter()
                            .map(|prop| {
                                match prop.node {
                                    PropKind::Shorthand(id) => Ok(ObjectPatProp::Assign {
                                        key: id.into(),
                                        value: None,
                                    }),
                                    PropKind::KeyValue { key, value } => Ok(ObjectPatProp::KeyValue {
                                        key,
                                        value: box self.reparse_expr_as_pat(value)?,
                                    }),
                                    PropKind::Assign { key, value } => Ok(ObjectPatProp::Assign {
                                        key,
                                        value: Some(value),
                                    }),
                                    _ => {
                                        unimplemented!("error reporting: object pattern cannot contain method property: {:?}", prop)
                                    }
                                }
                            })
                            .collect::<PResult<_>>()?,
                    },
                });
            }

            ExprKind::Ident(ident) => return Ok(ident.into()),

            ExprKind::Array { elems: mut exprs } => {
                if exprs.len() == 0 {
                    return Ok(Pat {
                        span,
                        node: PatKind::Array(vec![]),
                    });
                }
                let len = exprs.len();
                let mut params = Vec::with_capacity(exprs.len());
                // All but the last element: spreads are illegal here
                // because only the final element may be a rest pattern.
                for expr in exprs.drain(..len - 1) {
                    match expr {
                        Some(ExprOrSpread::Spread(expr)) => {
                            syntax_error!(SyntaxError::NonLastRestParam)
                        }
                        Some(ExprOrSpread::Expr(expr)) => {
                            params.push(self.reparse_expr_as_pat(expr).map(Some)?)
                        }
                        None => params.push(None),
                    }
                }
                assert_eq!(exprs.len(), 1);
                let expr = exprs.into_iter().next().unwrap();
                let last = match expr {
                    // Rest
                    Some(ExprOrSpread::Spread(expr)) => {
                        // FIXME: Span should start from ...
                        let span = expr.span;
                        self.reparse_expr_as_pat(expr)
                            .map(|pat| Pat {
                                span,
                                node: PatKind::Rest(box pat),
                            })
                            .map(Some)?
                    }
                    Some(ExprOrSpread::Expr(expr)) => self.reparse_expr_as_pat(expr).map(Some)?,
                    // TODO: sytax error if last element is ellison and ...rest exists.
                    None => None,
                };
                params.push(last);
                return Ok(Pat {
                    span,
                    node: PatKind::Array(params),
                });
            }
            _ => unimplemented!("reparse_expr_as_pat: {:?}", expr),
        }
    }

    /// Reinterprets a parenthesized expression list as arrow-function
    /// parameters. Only the last entry may be a spread (→ rest).
    pub(super) fn parse_exprs_as_params(
        &mut self,
        mut exprs: Vec<ExprOrSpread>,
    ) -> PResult<Vec<Pat>> {
        let len = exprs.len();
        if len == 0 {
            return Ok(vec![]);
        }
        let mut params = Vec::with_capacity(len);
        for expr in exprs.drain(..len - 1) {
            match expr {
                ExprOrSpread::Spread(expr) => syntax_error!(SyntaxError::NonLastRestParam),
                ExprOrSpread::Expr(expr) => params.push(self.reparse_expr_as_pat(expr)?),
            }
        }
        assert_eq!(exprs.len(), 1);
        let expr = exprs.into_iter().next().unwrap();
        let last = match expr {
            // Rest
            ExprOrSpread::Spread(expr) => {
                let span = expr.span; //TODO
                self.reparse_expr_as_pat(expr).map(|pat| Pat {
                    span,
                    node: PatKind::Rest(box pat),
                })?
            }
            ExprOrSpread::Expr(expr) => self.reparse_expr_as_pat(expr)?,
        };
        params.push(last);
        Ok(params)
    }
}

View File

@ -0,0 +1,653 @@
use super::*;
use swc_macros::ast_node;
mod module_item;
#[parser]
impl<I: Input> Parser<I> {
/// Parses statement-like items until `end` (or eof when `end` is
/// `None`), consuming the `end` token itself if one was given.
pub(super) fn parse_block_body<Type>(
    &mut self,
    top_level: bool,
    end: Option<&Token>,
) -> PResult<Vec<Type>>
where
    Self: StmtLikeParser<Type>,
    Type: From<Stmt>,
{
    let mut stmts = vec![];
    while {
        // NOTE(review): the extra block looks like a borrow-scope
        // workaround around `cur!()` — confirm before simplifying.
        let b = cur!() != end;
        b
    } {
        let stmt = self.parse_stmt_like(true, top_level)?;
        stmts.push(stmt);
    }
    if end.is_some() {
        bump!();
    }
    Ok(stmts)
}
/// Parses a plain statement (declarations not allowed).
fn parse_stmt(&mut self, top_level: bool) -> PResult<Stmt> {
    self.parse_stmt_internal(false, top_level)
}
/// Parses a statement list item (declarations allowed).
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<Stmt> {
    self.parse_stmt_internal(true, top_level)
}
/// Parse a statement, declaration or module item.
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<Type>
where
    Self: StmtLikeParser<Type>,
    Type: From<Stmt>,
{
    // `import` / `export` are only handled when the target item type
    // supports module items.
    if <Self as StmtLikeParser<Type>>::accept_import_export() {
        if is_one_of!("import", "export") {
            return self.handle_import_export(top_level);
        }
    }
    self.parse_stmt_internal(include_decl, top_level)
        .map(From::from)
}
/// Central statement dispatcher: branches on the leading keyword /
/// token, falling back to expression or labelled statements.
fn parse_stmt_internal(&mut self, include_decl: bool, top_level: bool) -> PResult<Stmt> {
    let start = cur_pos!();
    match *cur!()? {
        Word(Keyword(w)) => match w {
            Break | Continue => {
                return spanned!({
                    bump!();
                    let is_break = w == Break;
                    // Optional label before the terminating `;`.
                    let label = if eat!(';') {
                        None
                    } else {
                        let i = self.parse_label_ident().map(Some)?;
                        expect!(';');
                        i
                    };
                    Ok(if is_break {
                        StmtKind::Break { label }
                    } else {
                        StmtKind::Continue { label }
                    })
                })
            }
            Debugger => {
                return spanned!({
                    bump!();
                    expect!(';');
                    Ok(StmtKind::Debugger)
                })
            }
            Do => return self.parse_do_stmt(),
            For => return self.parse_for_stmt(),
            Function => {
                if !include_decl {
                    unexpected!()
                }
                return self.parse_fn_decl().map(Stmt::from);
            }
            Class if !include_decl => unexpected!(),
            Class => return self.parse_class_decl().map(Stmt::from),
            If => return self.parse_if_stmt(),
            Return => return self.parse_return_stmt(),
            Switch => return self.parse_switch_stmt(),
            Throw => return self.parse_throw_stmt(),
            Try => return self.parse_try_stmt(),
            // `let;` is identifier reference.
            Let if include_decl => match peek!() {
                Some(t) if t.follows_keyword_let(self.ctx.strict) => {
                    let v = self.parse_var_stmt(false)?;
                    return Ok(Stmt {
                        span: v.span,
                        node: StmtKind::Decl(Decl::Var(v)),
                    });
                }
                _ => {}
            },
            Const | Var => {
                if w == Var || include_decl {
                    let v = self.parse_var_stmt(false)?;
                    return Ok(Stmt {
                        span: v.span,
                        node: StmtKind::Decl(Decl::Var(v)),
                    });
                }
                // Handle `let;` by forwarding it to expression statement
            }
            While => return self.parse_while_stmt(),
            With => return self.parse_with_stmt(),
            _ => {}
        },
        LBrace => return spanned!({ self.parse_block().map(StmtKind::Block) }),
        Semi => {
            return spanned!({
                bump!();
                Ok(StmtKind::Empty)
            })
        }
        _ => {}
    }
    // Handle async function foo() {}
    // (No line break allowed between `async` and `function`.)
    if is!("async") && peeked_is!("function")
        && !self.input.has_linebreak_between_cur_and_peeked()
    {
        return self.parse_async_fn_decl().map(From::from);
    }
    // If the statement does not start with a statement keyword or a
    // brace, it's an ExpressionStatement or LabeledStatement. We
    // simply start parsing an expression, and afterwards, if the
    // next token is a colon and the expression was a simple
    // Identifier node, we switch to interpreting it as a label.
    let expr = self.include_in_expr(true).parse_expr()?;
    let expr = match expr {
        box Expr {
            span,
            node: ExprKind::Ident(ident),
        } => {
            if eat!(':') {
                return self.parse_labelled_stmt(ident);
            }
            box Expr {
                span,
                node: ExprKind::Ident(ident),
            }
        }
        expr => expr,
    };
    expect!(';');
    Ok(Stmt {
        span: span!(start),
        node: StmtKind::Expr(expr),
    }.into())
}
/// `if (test) consequent [else alt]`
fn parse_if_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("if");
        expect!('(');
        let test = self.include_in_expr(true).parse_expr()?;
        expect!(')');
        let consequent = {
            // Annex B
            if !self.ctx.strict && is!("function") {
                // TODO: report error?
            }
            box self.parse_stmt(false)?
        };
        let alt = if eat!("else") {
            Some(box self.parse_stmt(false)?)
        } else {
            None
        };
        Ok(StmtKind::If {
            test,
            consequent,
            alt,
        })
    })
}
/// `return [expr];` — the argument is optional.
fn parse_return_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("return");
        let arg = if is!(';') {
            None
        } else {
            self.include_in_expr(true).parse_expr().map(Some)?
        };
        expect!(';');
        Ok(StmtKind::Return { arg })
    })
}
/// `switch (discriminant) { case …: … default: … }`
///
/// Statements are accumulated into the most recent case (`cur`); at
/// most one `default` clause is allowed.
fn parse_switch_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("switch");
        expect!('(');
        let discriminant = self.include_in_expr(true).parse_expr()?;
        expect!(')');
        // Case currently being filled with consequent statements.
        let mut cur = None;
        let mut cases = vec![];
        let mut has_default = false;
        expect!('{');
        while !eof!() && !is!('}') {
            if is_one_of!("case", "default") {
                let is_case = is!("case");
                bump!();
                // Flush the previous case before starting a new one.
                cases.extend(cur.take());
                let test = if is_case {
                    self.include_in_expr(true).parse_expr().map(Some)?
                } else {
                    if has_default {
                        syntax_error!(SyntaxError::MultipleDefault)
                    }
                    has_default = true;
                    None
                };
                expect!(':');
                cur = Some(SwitchCase {
                    test,
                    consequent: vec![],
                });
            } else {
                match cur {
                    Some(ref mut cur) => {
                        cur.consequent.push(self.parse_stmt_list_item(false)?);
                    }
                    // A statement before any `case`/`default` label.
                    None => unexpected!(),
                }
            }
        }
        assert_and_bump!('}');
        cases.extend(cur);
        Ok(StmtKind::Switch {
            discriminant,
            cases,
        })
    })
}
/// Parses a `throw` statement.
///
/// # Errors
/// Reports `LineBreakInThrow` when a line terminator separates `throw` from
/// its argument (ASI would otherwise produce `throw;`, which is illegal).
fn parse_throw_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("throw");
        if self.input.had_line_break_before_cur() {
            syntax_error!(SyntaxError::LineBreakInThrow)
        }
        let arg = self.include_in_expr(true).parse_expr()?;
        expect!(';');
        Ok(StmtKind::Throw { arg })
    })
}
/// Parses a `try` statement: `try { } [catch (param) { }] [finally { }]`.
///
/// At least one of `catch` / `finally` must be present; a bare `try { }`
/// is rejected via `unexpected!`.
fn parse_try_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("try");
        let block = self.parse_block()?;
        let handler = if eat!("catch") {
            let param = self.parse_catch_param()?;
            self.parse_block()
                .map(|body| CatchClause { param, body })
                .map(Some)?
        } else {
            None
        };
        let finalizer = if eat!("finally") {
            self.parse_block().map(Some)?
        } else {
            // No finally and no catch: `try {}` alone is invalid.
            if handler.is_none() {
                unexpected!();
            }
            None
        };
        Ok(StmtKind::Try {
            block,
            handler,
            finalizer,
        })
    })
}
/// Parses the parenthesized catch parameter: `( BindingIdentifier | BindingPattern )`.
fn parse_catch_param(&mut self) -> PResult<Pat> {
    expect!('(');
    let pat = self.parse_binding_pat_or_ident()?;
    expect!(')');
    Ok(pat)
}
/// Parses a `var` / `let` / `const` declaration list.
///
/// `for_loop` is true when called from a `for(...)` head; in that case the
/// trailing `;` belongs to the loop head and is not consumed here.
fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<VarDecl> {
    let start = cur_pos!();
    // Current token is guaranteed to be var/let/const by the caller.
    let kind = match bump!() {
        tok!("const") => VarDeclKind::Const,
        tok!("let") => VarDeclKind::Let,
        tok!("var") => VarDeclKind::Var,
        _ => unreachable!(),
    };
    let mut decls = vec![];
    let mut first = true;
    // Comma-separated declarators; at least one is required.
    while first || eat!(',') {
        if first {
            first = false;
        }
        decls.push(self.parse_var_declarator(for_loop)?);
    }
    if !for_loop {
        expect!(';');
    }
    Ok(VarDecl {
        span: span!(start),
        kind,
        decls,
    })
}
/// Parses one declarator: `name [= init]`.
///
/// # Errors
/// Reports `PatVarWithoutInit` when a destructuring pattern has no
/// initializer (only plain identifiers may be uninitialized).
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<VarDeclarator> {
    let start = cur_pos!();
    let name = self.parse_binding_pat_or_ident()?;
    //FIXME: This is wrong. Should check in/of only on first loop.
    let init = if !for_loop || !is_one_of!("in", "of") {
        if eat!('=') {
            Some(self.parse_assignment_expr()?)
        } else {
            // Destructuring bindings require initializers.
            match name.node {
                PatKind::Ident(..) => None,
                _ => syntax_error!(SyntaxError::PatVarWithoutInit { span: span!(start) }),
            }
        }
    } else {
        // e.g. for(let a;;)
        None
    };
    return Ok(VarDeclarator {
        span: span!(start),
        name,
        init,
    });
}
/// Parses a `do body while (test)` statement.
fn parse_do_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("do");
        let body = box self.parse_stmt(false)?;
        expect!("while");
        let test = self.include_in_expr(true).parse_expr()?;
        // We *may* eat semicolon.
        // (Per spec the trailing `;` after do-while is optional.)
        let _ = eat!(';');
        Ok(StmtKind::DoWhile { test, body })
    })
}
/// Parses a `while (test) body` statement.
fn parse_while_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("while");
        expect!('(');
        let test = self.include_in_expr(true).parse_expr()?;
        expect!(')');
        let body = box self.parse_stmt(false)?;
        Ok(StmtKind::While { test, body })
    })
}
/// Parses a `with (obj) body` statement.
///
/// NOTE(review): `with` is forbidden in strict mode, but no strict-mode
/// check is visible here — confirm it is reported elsewhere.
fn parse_with_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("with");
        expect!('(');
        let obj = self.include_in_expr(true).parse_expr()?;
        expect!(')');
        let body = box self.parse_stmt(false)?;
        Ok(StmtKind::With { obj, body })
    })
}
/// Parses a brace-delimited statement block `{ ... }`.
///
/// The closing `}` is consumed by `parse_block_body` (passed as the
/// terminating token).
pub(super) fn parse_block(&mut self) -> PResult<BlockStmt> {
    spanned!({
        expect!('{');
        let stmts = self.parse_block_body(false, Some(&RBrace))?;
        Ok(stmts)
    })
}
/// Parses the statement following `label:` (the label and colon were
/// already consumed by the caller).
///
/// # Errors
/// Reports `DuplicateLabel` when `label` is already on the active label
/// stack.
///
/// NOTE(review): the label does not appear to be pushed onto
/// `self.state.labels` here — confirm that bookkeeping happens elsewhere,
/// otherwise the duplicate check can never fire.
fn parse_labelled_stmt(&mut self, label: Ident) -> PResult<Stmt> {
    let start = label.span.start;
    for l in &self.state.labels {
        if label.sym == *l {
            syntax_error!(SyntaxError::DuplicateLabel(label.sym.clone()))
        }
    }
    // Annex B: a labelled function declaration is accepted.
    let body = box if is!("function") {
        self.parse_fn_decl().map(Stmt::from)?
    } else {
        self.parse_stmt(false)?
    };
    Ok(Stmt {
        span: span!(start),
        node: StmtKind::Labeled { label, body },
    })
}
/// Parses all three `for` forms (`for(;;)`, `for..in`, `for..of`).
///
/// The head variant is decided by `parse_for_head`; the shared body is then
/// attached to the matching `StmtKind`.
fn parse_for_stmt(&mut self) -> PResult<Stmt> {
    spanned!({
        assert_and_bump!("for");
        expect!('(');
        let head = self.parse_for_head()?;
        expect!(')');
        let body = box self.parse_stmt(false)?;
        Ok(match head {
            ForHead::For { init, test, update } => StmtKind::For {
                init,
                test,
                update,
                body,
            },
            ForHead::ForIn { left, right } => StmtKind::ForIn { left, right, body },
            ForHead::ForOf { left, right } => StmtKind::ForOf { left, right, body },
        })
    })
}
/// Parses the parenthesized head of a `for` statement and classifies it.
///
/// Disambiguation: a leading `var`/`const` (or `let` followed by a token
/// that can start a binding) is parsed as a declaration; otherwise the head
/// starts with an expression, which is re-parsed as a pattern if `in`/`of`
/// follows. `include_in_expr(false)` keeps `in` out of the expression so
/// `for (a in b)` is not swallowed as a binary expression.
fn parse_for_head(&mut self) -> PResult<ForHead> {
    let start = cur_pos!();
    if is_one_of!("const", "var")
        || (is!("let") && peek!()?.follows_keyword_let(self.ctx.strict))
    {
        let decl = self.parse_var_stmt(true)?;
        if is_one_of!("of", "in") {
            return self.parse_for_each_head(VarDeclOrPat::VarDecl(decl));
        }
        // `expect_exact!` — presumably requires a literal `;` with no ASI;
        // confirm against the macro definition.
        expect_exact!(';');
        return self.parse_normal_for_head(Some(VarDeclOrExpr::VarDecl(decl)));
    }
    let init = if eat_exact!(';') {
        // Empty initializer: `for (;;)`.
        return self.parse_normal_for_head(None);
    } else {
        self.include_in_expr(false).parse_expr_or_pat()?
    };
    // for (a of b)
    if is_one_of!("of", "in") {
        let pat = self.reparse_expr_as_pat(init)?;
        return self.parse_for_each_head(VarDeclOrPat::Pat(pat));
    }
    expect_exact!(';');
    self.parse_normal_for_head(Some(VarDeclOrExpr::Expr(init)))
}
/// Parses the remainder of a `for..in` / `for..of` head after `left`.
///
/// The caller guarantees the current token is `in` or `of`; it is consumed
/// here and decides the variant. Per the spec, the right-hand side of
/// `for..of` is an AssignmentExpression, while `for..in` takes a full
/// Expression.
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<ForHead> {
    let of = bump!() == tok!("of");
    if of {
        let right = self.include_in_expr(true).parse_assignment_expr()?;
        Ok(ForHead::ForOf { left, right })
    } else {
        let right = self.include_in_expr(true).parse_expr()?;
        // Bug fix: this branch handles `for..in`, so it must produce
        // `ForHead::ForIn`. It previously returned `ForHead::ForOf`,
        // which made every for-in statement parse as a for-of.
        Ok(ForHead::ForIn { left, right })
    }
}
/// Parses the `test; update` tail of a classic `for (init; test; update)`
/// head. `init` was already parsed (and its `;` consumed) by the caller.
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<ForHead> {
    let test = if eat_exact!(';') {
        None
    } else {
        let test = self.include_in_expr(true).parse_expr().map(Some)?;
        expect_exact!(';');
        test
    };
    // The update clause runs up to the closing `)`, which the caller eats.
    let update = if is!(')') {
        None
    } else {
        self.include_in_expr(true).parse_expr().map(Some)?
    };
    Ok(ForHead::For { init, test, update })
}
}
/// Intermediate result of parsing a `for` head, before the loop body is
/// attached; mirrors the three `StmtKind` loop variants.
#[ast_node]
enum ForHead {
    /// Classic `for (init; test; update)`.
    For {
        init: Option<VarDeclOrExpr>,
        test: Option<Box<Expr>>,
        update: Option<Box<Expr>>,
    },
    /// `for (left in right)`.
    ForIn {
        left: VarDeclOrPat,
        right: Box<Expr>,
    },
    /// `for (left of right)`.
    ForOf {
        left: VarDeclOrPat,
        right: Box<Expr>,
    },
}
/// Abstracts over parsing targets that are "statement-like": plain `Stmt`
/// lists reject `import`/`export`, while module item lists accept them.
pub(super) trait StmtLikeParser<Type> {
    /// Whether `import` / `export` declarations are legal in this context.
    fn accept_import_export() -> bool;
    /// Parses an `import` / `export` declaration; only called when
    /// `accept_import_export()` is true.
    fn handle_import_export(&mut self, top_level: bool) -> PResult<Type>;
}
impl<I: Input> StmtLikeParser<Stmt> for Parser<I> {
    /// Plain statement lists never accept `import` / `export`.
    fn accept_import_export() -> bool {
        false
    }

    /// Never reached for `Stmt`, because `accept_import_export` returns
    /// `false`. The parameter is prefixed with `_` to silence the
    /// unused-variable warning the original signature produced.
    fn handle_import_export(&mut self, _top_level: bool) -> PResult<Stmt> {
        unreachable!()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lexer::Lexer;

    /// Builds a module-mode parser over `s` with a test logger.
    fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
        let logger = ::testing::logger().new(o!("src" => s));
        Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
    }

    /// Parses `s` as a single top-level statement, panicking on failure.
    fn stmt(s: &'static str) -> Stmt {
        mk(s).parse_stmt(true).expect("failed to parse a statement")
    }

    /// Parses `s` as an expression, panicking on failure.
    fn expr(s: &'static str) -> Box<Expr> {
        mk(s).parse_expr().expect("failed to parse an expression")
    }

    // Dummy span used in expected values; comparisons ignore spans anyway.
    #[allow(non_upper_case_globals)]
    const span: Span = Span::DUMMY;

    #[test]
    fn expr_stmt() {
        assert_eq_ignore_span!(
            stmt("a + b + c"),
            Stmt {
                span: Default::default(),
                node: StmtKind::Expr(expr("a + b + c")),
            }
        )
    }

    #[test]
    fn throw_this() {
        assert_eq_ignore_span!(
            stmt("throw this"),
            Stmt {
                span: Default::default(),
                node: StmtKind::Throw { arg: expr("this") },
            }
        )
    }

    // ASI: `return 1` without a semicolon must not produce an extra empty
    // statement inside the block.
    #[test]
    fn no_empty_without_semi() {
        assert_eq_ignore_span!(
            stmt("{ return 1 }"),
            stmt(
                "{
            return 1
            }"
            )
        );

        assert_eq_ignore_span!(
            stmt("{ return 1; }"),
            Stmt {
                span,
                node: StmtKind::Block(BlockStmt {
                    span,
                    stmts: vec![stmt("return 1")],
                }),
            }
        );
    }

    #[test]
    fn if_else() {
        assert_eq_ignore_span!(
            stmt("if (a) b; else c"),
            Stmt {
                span,
                node: StmtKind::If {
                    test: expr("a"),
                    consequent: box stmt("b;"),
                    alt: Some(box stmt("c")),
                },
            }
        );
    }
}

View File

@ -0,0 +1,265 @@
use super::*;
#[parser]
impl<I: Input> Parser<I> {
/// Parses an `import` declaration in all its forms:
/// `import 'mod'`, `import def from 'mod'`, `import * as ns from 'mod'`,
/// and `import { a, b as c } from 'mod'` (plus combinations).
fn parse_import(&mut self) -> PResult<ModuleDecl> {
    let start = cur_pos!();
    assert_and_bump!("import");

    // Handle import 'mod.js'
    match *cur!()? {
        Str(..) => match bump!() {
            Str(src, _) => {
                expect!(';');
                return Ok(ModuleDecl {
                    span: span!(start),
                    node: ModuleDeclKind::Import {
                        src,
                        specifiers: vec![],
                    },
                });
            }
            _ => unreachable!(),
        },
        _ => {}
    }

    let mut specifiers = vec![];

    // Optional default import: `import foo [, ...] from 'mod'`.
    if is!(BindingIdent) {
        let local = self.parse_imported_default_binding()?;
        //TODO: Better error reporting
        if !is!("from") {
            expect!(',');
        }
        specifiers.push(ImportSpecifier {
            span: local.span,
            local,
            node: ImportSpecifierKind::Default,
        });
    }

    {
        let import_spec_start = cur_pos!();
        if eat!('*') {
            // Namespace import: `* as ns`.
            expect!("as");
            let local = self.parse_imported_binding()?;
            specifiers.push(ImportSpecifier {
                span: span!(import_spec_start),
                local,
                node: ImportSpecifierKind::Namespace,
            });
        } else if eat!('{') {
            // Named imports; trailing comma before `}` is permitted.
            let mut first = true;
            while !eof!() && !is!('}') {
                if first {
                    first = false;
                } else {
                    if eat!(',') {
                        if is!('}') {
                            break;
                        }
                    }
                }
                specifiers.push(self.parse_import_specifier()?);
            }
            expect!('}');
        }
    }

    let src = self.parse_from_clause_and_semi()?;
    Ok(ModuleDecl {
        span: span!(start),
        node: ModuleDeclKind::Import { specifiers, src },
    })
}
/// Parse `foo`, `foo2 as bar` in `import { foo, foo2 as bar }`
fn parse_import_specifier(&mut self) -> PResult<ImportSpecifier> {
    let start = cur_pos!();
    match *cur!()? {
        Word(..) => {
            // The imported name may be any identifier-like word (keywords
            // are valid here, e.g. `import { default as x }`).
            let orig_name = self.parse_ident_name()?;

            if eat!("as") {
                // Renamed import: the local binding is validated as a
                // binding identifier.
                let local = self.parse_binding_ident()?;
                return Ok(ImportSpecifier {
                    span: Span {
                        start,
                        end: local.span.end,
                    },
                    local,
                    node: ImportSpecifierKind::Specific {
                        imported: Some(orig_name),
                    },
                });
            }

            // TODO: Check if it's binding ident.
            let local = orig_name;
            return Ok(ImportSpecifier {
                span: span!(start),
                local,
                node: ImportSpecifierKind::Specific { imported: None },
            });
        }
        _ => unexpected!(),
    }
}
/// Parses the local name of a default import (`foo` in `import foo from ...`).
fn parse_imported_default_binding(&mut self) -> PResult<Ident> {
    self.parse_imported_binding()
}
/// Parses an imported binding identifier.
///
/// `in_async` / `in_generator` are cleared because import bindings live at
/// module top level, where `await` / `yield` are ordinary identifiers for
/// binding purposes.
fn parse_imported_binding(&mut self) -> PResult<Ident> {
    self.with_ctx(Context {
        in_async: false,
        in_generator: false,
        ..self.ctx
    }).parse_binding_ident()
}
/// Parses an `export` declaration in all its forms:
/// `export * from`, `export default ...`, `export <decl>`, and
/// `export { ... } [from]`.
fn parse_export(&mut self) -> PResult<ModuleDecl> {
    let start = cur_pos!();
    assert_and_bump!("export");

    // `export * from 'mod';`
    if eat!('*') {
        let src = self.parse_from_clause_and_semi()?;
        return Ok(ModuleDecl {
            span: span!(start),
            node: ModuleDeclKind::ExportAll { src },
        });
    }

    if eat!("default") {
        // Default export: class / async fn / fn declarations keep their
        // (optional) names; anything else is an assignment expression.
        let decl = if is!("class") {
            self.parse_default_class()?
        } else if is!("async") && peeked_is!("function")
            && !self.input.has_linebreak_between_cur_and_peeked()
        {
            self.parse_default_async_fn()?
        } else if is!("function") {
            self.parse_default_fn()?
        } else {
            let expr = self.include_in_expr(true).parse_assignment_expr()?;
            expect!(';');
            return Ok(ModuleDecl {
                span: span!(start),
                node: ModuleDeclKind::ExportDefaultExpr(expr),
            });
        };

        return Ok(ModuleDecl {
            span: span!(start),
            node: ModuleDeclKind::ExportDefaultDecl(decl),
        });
    }

    let decl = if is!("class") {
        self.parse_class_decl()?
    } else if is!("async") && peeked_is!("function")
        && !self.input.has_linebreak_between_cur_and_peeked()
    {
        self.parse_async_fn_decl()?
    } else if is!("function") {
        self.parse_fn_decl()?
    } else if is!("var") || is!("const")
        || (is!("let")
            && peek!()
                .map(|t| {
                    // module code is always in strict mode.
                    t.follows_keyword_let(true)
                })
                .unwrap_or(false))
    {
        self.parse_var_stmt(false).map(Decl::Var)?
    } else {
        // export {};
        // export {} from '';
        expect!('{');
        let mut specifiers = vec![];
        let mut first = true;
        while is_one_of!(',', IdentName) {
            if first {
                first = false;
            } else {
                if eat!(',') {
                    if is!('}') {
                        break;
                    }
                }
            }

            specifiers.push(self.parse_export_specifier()?);
        }
        expect!('}');

        // Optional re-export source.
        let src = if is!("from") {
            Some(self.parse_from_clause_and_semi()?)
        } else {
            None
        };
        return Ok(ModuleDecl {
            span: span!(start),
            node: ModuleDeclKind::ExportNamed { specifiers, src },
        });
    };

    return Ok(ModuleDecl {
        span: span!(start),
        node: ModuleDeclKind::ExportDecl(decl),
    });
}
/// Parses `foo` or `foo as bar` inside `export { ... }`.
fn parse_export_specifier(&mut self) -> PResult<ExportSpecifier> {
    let orig = self.parse_ident_name()?;

    let exported = if eat!("as") {
        Some(self.parse_ident_name()?)
    } else {
        None
    };

    Ok(ExportSpecifier { orig, exported })
}
/// Parses `from 'module-source';` and returns the source string.
fn parse_from_clause_and_semi(&mut self) -> PResult<String> {
    expect!("from");
    // Only a string literal is a valid module specifier.
    match *cur!()? {
        Str(..) => match bump!() {
            Str(src, _) => {
                expect!(';');
                Ok(src)
            }
            _ => unreachable!(),
        },
        _ => unexpected!(),
    }
}
}
#[parser]
impl<I: Input> StmtLikeParser<ModuleItem> for Parser<I> {
    /// Module bodies accept `import` / `export` declarations.
    fn accept_import_export() -> bool {
        true
    }

    /// Parses one `import` / `export` declaration into a `ModuleItem`.
    ///
    /// # Errors
    /// Reports `NonTopLevelImportExport` when invoked anywhere but the top
    /// level of a module.
    fn handle_import_export(&mut self, top_level: bool) -> PResult<ModuleItem> {
        if !top_level {
            syntax_error!(SyntaxError::NonTopLevelImportExport);
        }
        // The original code bound `cur_pos!()` to an unused `start` local;
        // the declaration span is produced inside `parse_import` /
        // `parse_export`, so that binding was dead code and is removed.
        let decl = if is!("import") {
            self.parse_import()?
        } else if is!("export") {
            self.parse_export()?
        } else {
            unreachable!("handle_import_export should not be called if current token isn't import nor export")
        };

        Ok(ModuleItem::ModuleDecl(decl))
    }
}

View File

@ -0,0 +1,56 @@
use super::*;
impl<I: Input> Parser<I> {
    /// Original context is restored when returned guard is dropped.
    pub(super) fn with_ctx(&mut self, ctx: Context) -> WithCtx<I> {
        let orig_ctx = self.ctx;
        self.ctx = ctx;
        WithCtx {
            orig_ctx,
            inner: self,
        }
    }

    /// Original context is restored when returned guard is dropped.
    ///
    /// Convenience wrapper over `with_ctx` that only toggles whether `in`
    /// may appear in expressions (relevant inside `for (...)` heads).
    pub(super) fn include_in_expr(&mut self, include_in_expr: bool) -> WithCtx<I> {
        self.with_ctx(Context {
            include_in_expr,
            ..self.ctx
        })
    }

    /// Parse with given closure
    ///
    /// Exists as a named hook so the `#[parser]` proc macro can recognize
    /// `parse_with` call sites (see macros/expand).
    pub(super) fn parse_with<F, Ret>(&mut self, f: F) -> Ret
    where
        F: FnOnce(&mut Self) -> Ret,
    {
        f(self)
    }
}
/// Shared interface for parsing object-literal-like productions (object
/// expressions vs. object patterns) that differ only in their property type.
pub trait ParseObject<Obj> {
    /// Property node type for this object flavor.
    type Prop;
    /// Assembles the final object node from its parsed properties.
    fn make_object(span: Span, props: Vec<Self::Prop>) -> Obj;
    /// Parses a single property.
    fn parse_object_prop(&mut self) -> PResult<Self::Prop>;
}
/// RAII guard returned by `Parser::with_ctx`; derefs to the parser and
/// restores the saved context on drop.
pub struct WithCtx<'a, I: 'a + Input> {
    inner: &'a mut Parser<I>,
    // Context to restore when this guard is dropped.
    orig_ctx: Context,
}
/// Transparent read access to the wrapped parser.
impl<'a, I: Input> Deref for WithCtx<'a, I> {
    type Target = Parser<I>;
    fn deref(&self) -> &Parser<I> {
        &self.inner
    }
}
/// Transparent mutable access to the wrapped parser.
impl<'a, I: Input> DerefMut for WithCtx<'a, I> {
    fn deref_mut(&mut self) -> &mut Parser<I> {
        &mut self.inner
    }
}
/// Restores the parser context that was active when the guard was created.
impl<'a, I: Input> Drop for WithCtx<'a, I> {
    fn drop(&mut self) {
        self.inner.ctx = self.orig_ctx;
    }
}

View File

@ -0,0 +1,481 @@
//! Ported from [babel/babylon][]
//!
//! [babel/babylon]:https://github.com/babel/babel/blob/2d378d076eb0c5fe63234a8b509886005c01d7ee/packages/babylon/src/tokenizer/types.js
pub use self::AssignOpToken::*;
pub use self::BinOpToken::*;
pub use self::Keyword::*;
pub use self::Token::*;
pub use self::Word::*;
pub use ast::AssignOp as AssignOpToken;
use ast::BinaryOp;
pub use ast::Number;
use std::fmt::{self, Debug, Display, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
/// A lexical token.
///
/// The `Kind` derive generates `starts_expr()` / `before_expr()` predicates
/// from the `#[kind(...)]` attributes; variants marked `delegate` forward
/// the predicates to their payload.
#[derive(Kind, Debug, Clone, PartialEq)]
#[kind(functions(starts_expr = "bool", before_expr = "bool"))]
pub enum Token {
    /// Identifier, "null", "true", "false".
    ///
    /// Contains `null` and ``
    #[kind(delegate)]
    Word(Word),

    /// '=>'
    #[kind(before_expr)]
    Arrow,

    /// '@'
    At,
    /// '.'
    Dot,

    /// '...'
    #[kind(before_expr)]
    DotDotDot,
    /// '!'
    #[kind(before_expr, starts_expr)]
    Bang,

    /// '('
    #[kind(before_expr, starts_expr)]
    LParen,
    /// ')'
    RParen,
    /// `[`
    #[kind(before_expr, starts_expr)]
    LBracket,
    /// ']'
    RBracket,
    /// '{'
    LBrace,
    /// '}'
    RBrace,

    /// ';'
    #[kind(before_expr)]
    Semi,
    /// ','
    #[kind(before_expr)]
    Comma,

    /// '`'
    #[kind(starts_expr)]
    BackQuote,
    /// Raw text chunk inside a template literal.
    Template(String),
    /// ':'
    #[kind(before_expr)]
    Colon,
    /// '::'
    #[kind(before_expr)]
    ColonColon,
    /// Binary operator; predicates delegate to the `BinOpToken`.
    #[kind(delegate)]
    BinOp(BinOpToken),
    /// Assignment operator (`=`, `+=`, ...).
    #[kind(before_expr)]
    AssignOp(AssignOpToken),

    /// '${'
    #[kind(before_expr, starts_expr)]
    DollarLBrace,

    /// '?'
    #[kind(before_expr)]
    QuestionMark,

    /// `++`
    #[kind(before_expr, starts_expr)]
    PlusPlus,
    /// `--`
    #[kind(before_expr, starts_expr)]
    MinusMinus,

    /// `~`
    #[kind(before_expr, starts_expr)]
    Tilde,

    /// String literal.
    /// bool field is true if it's enclosed by '"' ( double quote).
    #[kind(starts_expr)]
    Str(String, bool),

    /// Regexp literal.
    #[kind(starts_expr)]
    Regex(String, JsWord),

    /// TODO: Make Num as enum and separate decimal, binary, ..etc
    #[kind(starts_expr)]
    Num(Number),
}
/// Binary-operator tokens. `starts_expr` marks operators that can also
/// begin an expression as unary (`+`, `-`).
#[derive(Kind, Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[kind(functions(starts_expr = "bool"))]
pub enum BinOpToken {
    /// `==`
    EqEq,
    /// `!=`
    NotEq,
    /// `===`
    EqEqEq,
    /// `!==`
    NotEqEq,
    /// `<`
    Lt,
    /// `<=`
    LtEq,
    /// `>`
    Gt,
    /// `>=`
    GtEq,
    /// `<<`
    LShift,
    /// `>>`
    RShift,
    /// `>>>`
    ZeroFillRShift,

    /// `+`
    #[kind(starts_expr)]
    Add,
    /// `-`
    #[kind(starts_expr)]
    Sub,
    /// `*`
    Mul,
    /// `/`
    Div,
    /// `%`
    Mod,

    /// `|`
    BitOr,
    /// `^`
    BitXor,
    /// `&`
    BitAnd,

    // /// `in`
    // #[kind(precedence = "7")]
    // In,
    // /// `instanceof`
    // #[kind(precedence = "7")]
    // InstanceOf,
    /// `**`
    Exp,

    /// `||`
    LogicalOr,
    /// `&&`
    LogicalAnd,
}
impl BinOpToken {
    /// Every binary operator may be followed by the start of an expression.
    /// (Used by the `Kind` derive's `delegate` on `Token::BinOp`.)
    fn before_expr(&self) -> bool {
        true
    }
}
/// A token paired with the source span it was lexed from.
#[derive(Debug, Clone, PartialEq)]
pub struct TokenAndSpan {
    pub token: Token,
    pub span: Span,
}
/// An identifier-like token: a reserved keyword, one of the literal words
/// `null` / `true` / `false`, or an ordinary identifier.
#[derive(Kind, Clone, PartialEq, Eq, Hash)]
#[kind(functions(starts_expr = "bool", before_expr = "bool"))]
pub enum Word {
    // Predicates delegate to the inner `Keyword`.
    #[kind(delegate)]
    Keyword(Keyword),

    #[kind(starts_expr)]
    Null,
    #[kind(starts_expr)]
    True,
    #[kind(starts_expr)]
    False,
    #[kind(starts_expr)]
    Ident(JsWord),
}
/// Classifies an interned word: maps reserved words to their `Keyword` /
/// literal variants and everything else to `Ident`.
impl From<JsWord> for Word {
    fn from(i: JsWord) -> Self {
        match i {
            js_word!("null") => Null,
            js_word!("true") => True,
            js_word!("false") => False,
            js_word!("await") => Await.into(),
            js_word!("break") => Break.into(),
            js_word!("case") => Case.into(),
            js_word!("catch") => Catch.into(),
            js_word!("continue") => Continue.into(),
            js_word!("debugger") => Debugger.into(),
            js_word!("default") => Default_.into(),
            js_word!("do") => Do.into(),
            js_word!("export") => Export.into(),
            js_word!("else") => Else.into(),
            js_word!("finally") => Finally.into(),
            js_word!("for") => For.into(),
            js_word!("function") => Function.into(),
            js_word!("if") => If.into(),
            js_word!("return") => Return.into(),
            js_word!("switch") => Switch.into(),
            js_word!("throw") => Throw.into(),
            js_word!("try") => Try.into(),
            js_word!("var") => Var.into(),
            js_word!("let") => Let.into(),
            js_word!("const") => Const.into(),
            js_word!("while") => While.into(),
            js_word!("with") => With.into(),
            js_word!("new") => New.into(),
            js_word!("this") => This.into(),
            js_word!("super") => Super.into(),
            js_word!("class") => Class.into(),
            js_word!("extends") => Extends.into(),
            js_word!("import") => Import.into(),
            js_word!("yield") => Yield.into(),
            js_word!("in") => In.into(),
            js_word!("instanceof") => InstanceOf.into(),
            js_word!("typeof") => TypeOf.into(),
            js_word!("void") => Void.into(),
            js_word!("delete") => Delete.into(),
            _ => Ident(i),
        }
    }
}
/// Wraps a keyword in the `Word::Keyword` variant (enables `.into()` in the
/// `From<JsWord>` table above).
impl From<Keyword> for Word {
    fn from(kwd: Keyword) -> Self {
        Keyword(kwd)
    }
}
/// Converts a `Word` back to its interned string form; inverse of
/// `From<JsWord> for Word`.
impl From<Word> for JsWord {
    fn from(w: Word) -> Self {
        match w {
            Keyword(k) => match k {
                Await => js_word!("await"),
                Break => js_word!("break"),
                Case => js_word!("case"),
                Catch => js_word!("catch"),
                Continue => js_word!("continue"),
                Debugger => js_word!("debugger"),
                Default_ => js_word!("default"),
                Do => js_word!("do"),
                Else => js_word!("else"),
                Finally => js_word!("finally"),
                For => js_word!("for"),
                Function => js_word!("function"),
                If => js_word!("if"),
                Return => js_word!("return"),
                Switch => js_word!("switch"),
                Throw => js_word!("throw"),
                Try => js_word!("try"),
                Var => js_word!("var"),
                Let => js_word!("let"),
                Const => js_word!("const"),
                While => js_word!("while"),
                With => js_word!("with"),
                New => js_word!("new"),
                This => js_word!("this"),
                Super => js_word!("super"),
                Class => js_word!("class"),
                Extends => js_word!("extends"),
                Export => js_word!("export"),
                Import => js_word!("import"),
                Yield => js_word!("yield"),
                In => js_word!("in"),
                InstanceOf => js_word!("instanceof"),
                TypeOf => js_word!("typeof"),
                Void => js_word!("void"),
                Delete => js_word!("delete"),
            },
            Null => js_word!("null"),
            True => js_word!("true"),
            False => js_word!("false"),
            Ident(w) => w,
        }
    }
}
/// Debug-prints a word as its source text (keywords round-trip through the
/// `JsWord` conversion above).
impl Debug for Word {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match *self {
            Word::Ident(ref s) => Display::fmt(s, f),
            _ => {
                let s: JsWord = self.clone().into();
                Display::fmt(&s, f)
            }
        }
    }
}
impl Word {
    /// Returns true when this word cannot be used as a binding identifier.
    ///
    /// `let`, `await` and `yield` are reserved only in strict mode; the
    /// strict-mode-only future reserved words (`implements`, `package`, ...)
    /// and `enum` are matched by name on the `Ident` arm.
    pub(crate) fn is_reserved_word(&self, strict: bool) -> bool {
        match *self {
            Keyword(Let) => strict,
            Keyword(Await) | Keyword(Yield) => strict,
            Keyword(_) => true,
            Null | True | False => true,
            Ident(ref name) => {
                // `enum` is a reserved word in both modes.
                if name == "enum" {
                    return true;
                }
                if strict {
                    match &**name {
                        "implements" | "package" | "protected" | "interface" | "private"
                        | "public" => return true,
                        _ => {}
                    }
                }
                false
            }
        }
    }
}
/// Keywords
///
/// `before_expr` / `starts_expr` feed the generated predicates used by the
/// lexer (e.g. to disambiguate regex literals from division).
#[derive(Kind, Debug, Clone, Copy, PartialEq, Eq, Hash)]
// Consistency fix: every other `Kind` derive in this file spells the config
// key `functions(...)`; this one said `function(...)`, which looks like a
// typo in the derive configuration.
#[kind(functions(before_expr = "bool", starts_expr = "bool"))]
pub enum Keyword {
    /// Spec says this might be identifier.
    #[kind(before_expr)]
    Await,

    Break,
    #[kind(before_expr)]
    Case,
    Catch,
    Continue,
    Debugger,
    #[kind(before_expr)]
    Default_,
    #[kind(before_expr)]
    Do,
    #[kind(before_expr)]
    Else,

    Finally,
    For,
    #[kind(starts_expr)]
    Function,
    If,
    #[kind(before_expr)]
    Return,
    Switch,
    #[kind(before_expr, starts_expr)]
    Throw,
    Try,
    Var,
    Let,
    Const,
    While,
    With,

    #[kind(before_expr, starts_expr)]
    New,
    #[kind(starts_expr)]
    This,
    #[kind(starts_expr)]
    Super,
    #[kind(starts_expr)]
    Class,

    #[kind(before_expr)]
    Extends,

    Export,
    #[kind(starts_expr)]
    Import,

    /// Spec says this might be identifier.
    #[kind(before_expr, starts_expr)]
    Yield,

    #[kind(before_expr)]
    In,
    #[kind(before_expr)]
    InstanceOf,
    #[kind(before_expr, starts_expr)]
    TypeOf,
    #[kind(before_expr, starts_expr)]
    Void,
    #[kind(before_expr, starts_expr)]
    Delete,
}
/// Maps lexer binary-operator tokens to AST binary operators (a direct
/// one-to-one mapping; the token enum and AST enum differ only in type).
impl From<BinOpToken> for BinaryOp {
    fn from(t: BinOpToken) -> Self {
        use self::BinaryOp::*;
        match t {
            BinOpToken::EqEq => EqEq,
            BinOpToken::NotEq => NotEq,
            BinOpToken::EqEqEq => EqEqEq,
            BinOpToken::NotEqEq => NotEqEq,
            BinOpToken::Lt => Lt,
            BinOpToken::LtEq => LtEq,
            BinOpToken::Gt => Gt,
            BinOpToken::GtEq => GtEq,
            BinOpToken::LShift => LShift,
            BinOpToken::RShift => RShift,
            BinOpToken::ZeroFillRShift => ZeroFillRShift,
            BinOpToken::Add => Add,
            BinOpToken::Sub => Sub,
            BinOpToken::Mul => Mul,
            BinOpToken::Div => Div,
            BinOpToken::Mod => Mod,
            BinOpToken::BitOr => BitOr,
            BinOpToken::BitXor => BitXor,
            BinOpToken::BitAnd => BitAnd,
            BinOpToken::LogicalOr => LogicalOr,
            BinOpToken::LogicalAnd => LogicalAnd,
            BinOpToken::Exp => Exp,
        }
    }
}
impl Token {
    /// Returns true if `self` can follow keyword let.
    ///
    /// e.g. `let a = xx;`, `let {a:{}} = 1`
    ///
    /// NOTE(review): `_strict` is accepted but unused, even though the
    /// comment below mentions strict mode — confirm whether strict-mode
    /// handling was intended here.
    pub(crate) fn follows_keyword_let(&self, _strict: bool) -> bool {
        match *self {
            // This is required to recognize `let let` in strict mode.
            tok!("let") => true,
            tok!('{') | tok!('[') | Word(Ident(..)) | tok!("yield") | tok!("await") => true,
            _ => false,
        }
    }
}

@ -0,0 +1 @@
Subproject commit 8900484be19ca00f57ecbd9e66335bc4f404b637

View File

@ -0,0 +1,267 @@
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(specialization)]
#![feature(test)]
#[macro_use]
extern crate slog;
extern crate swc_common;
extern crate swc_ecmascript;
extern crate test;
extern crate testing;
use slog::Logger;
use std::env;
use std::fs::File;
use std::fs::read_dir;
use std::io::{self, Read};
use std::panic::{catch_unwind, resume_unwind};
use std::path::Path;
use swc_common::Span;
use swc_common::fold::{FoldWith, Folder};
use swc_ecmascript::ast::*;
use swc_ecmascript::lexer::Lexer;
use swc_ecmascript::parser::{PResult, Parser};
use test::{test_main, Options, TestDesc, TestDescAndFn, TestFn, TestName};
use test::ShouldPanic::No;
/// test262-parser "pass" fixtures that are skipped, grouped by reason.
const IGNORED_PASS_TESTS: &[&str] = &[
    // Wrong tests (variable name or value is different)
    "0339fa95c78c11bd.js",
    "0426f15dac46e92d.js",
    "0b4d61559ccce0f9.js",
    "0f88c334715d2489.js",
    "1093d98f5fc0758d.js",
    "15d9592709b947a0.js",
    "2179895ec5cc6276.js",
    "247a3a57e8176ebd.js",
    "441a92357939904a.js",
    "47f974d6fc52e3e4.js",
    "4e1a0da46ca45afe.js",
    "5829d742ab805866.js",
    "589dc8ad3b9aa28f.js",
    "598a5cedba92154d.js",
    "72d79750e81ef03d.js",
    "7788d3c1e1247da9.js",
    "7b72d7b43bedc895.js",
    "7dab6e55461806c9.js",
    "82c827ccaecbe22b.js",
    "87a9b0d1d80812cc.js",
    "8c80f7ee04352eba.js",
    "96f5d93be9a54573.js",
    "988e362ed9ddcac5.js",
    "9bcae7c7f00b4e3c.js",
    "a8a03a88237c4e8f.js",
    "ad06370e34811a6a.js",
    "b0fdc038ee292aba.js",
    "b62c6dd890bef675.js",
    "cb211fadccb029c7.js",
    "ce968fcdf3a1987c.js",
    "db3c01738aaf0b92.js",
    "e1387fe892984e2b.js",
    "e71c1d5f0b6b833c.js",
    "e8ea384458526db0.js",
    // We don't implement Annex B fully.
    "1c1e2a43fe5515b6.js",
    "3dabeca76119d501.js",
    "52aeec7b8da212a2.js",
    "59ae0289778b80cd.js",
    "a4d62a651f69d815.js",
    "c06df922631aeabc.js",
];
/// Registers one dynamically-named test case with libtest.
///
/// `ignore` marks the case as `#[ignore]`d; `f` is the test body.
fn add_test<F: FnOnce() + Send + 'static>(
    tests: &mut Vec<TestDescAndFn>,
    name: String,
    ignore: bool,
    f: F,
) {
    tests.push(TestDescAndFn {
        desc: TestDesc {
            name: TestName::DynTestName(name),
            // Field-init shorthand for consistency with the rest of the
            // codebase (was `ignore: ignore`).
            ignore,
            should_panic: No,
            allow_fail: false,
        },
        testfn: TestFn::DynTestFn(box f),
    });
}
/// Builds one test case per file in `tests/test262-parser/pass`.
///
/// Each case parses the fixture and its counterpart in `pass-explicit`
/// (same program with all optional syntax written out) and asserts that
/// both produce the same normalized AST.
fn unit_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
    let root = {
        let mut root = Path::new(env!("CARGO_MANIFEST_DIR")).to_path_buf();
        root.push("tests");
        root.push("test262-parser");
        root
    };

    eprintln!("Loading tests from {}", root.display());

    let pass_dir = root.join("pass");

    let files = read_dir(&pass_dir)?;

    for entry in files {
        let entry = entry?;
        let file_name = entry
            .path()
            .strip_prefix(&pass_dir)
            .expect("failed to string prefix")
            .to_str()
            .unwrap()
            .to_string();

        let input = {
            let mut buf = String::new();
            File::open(entry.path())?.read_to_string(&mut buf)?;
            buf
        };
        let explicit = {
            let mut buf = String::new();
            File::open(root.join("pass-explicit").join(&file_name))?.read_to_string(&mut buf)?;
            buf
        };

        // TODO: remove this
        let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);

        // NOTE(review): module-vs-script detection relies on the fixture
        // file name containing "module" — confirm this matches the
        // test262-parser-tests naming convention.
        let module = file_name.contains("module");

        let name = format!("test262_parser_pass_{}", file_name);
        add_test(tests, name, ignore, move || {
            println!(
                "\n\n\nRunning test {}\nSource:\n{}\nExplicit:\n{}",
                file_name, input, explicit
            );
            let res = catch_unwind(move || {
                if module {
                    let p = |ty, s| {
                        parse_module(&file_name, s).unwrap_or_else(|err| {
                            panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
                        })
                    };
                    let src = p("", &input);
                    let expected = p("explicit ", &explicit);
                    assert_eq!(src, expected);
                } else {
                    let p = |ty, s| {
                        parse_script(&file_name, s).unwrap_or_else(|err| {
                            panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
                        })
                    };
                    let src = p("", &input);
                    let expected = p("explicit ", &explicit);
                    assert_eq!(src, expected);
                }
            });

            // Re-raise any panic so libtest records the failure.
            match res {
                Ok(()) => {}
                Err(err) => resume_unwind(err),
            }
        });
    }

    Ok(())
}
/// Creates a test logger tagged with the fixture file name and source.
fn logger(file_name: &str, src: &str) -> Logger {
    let (f, s): (String, String) = (file_name.into(), src.into());
    ::testing::logger().new(o!("file name" => f, "src" => s,))
}
/// Parses `s` as a script and normalizes the resulting AST for comparison.
fn parse_script(file_name: &str, s: &str) -> PResult<Vec<Stmt>> {
    let l = logger(file_name, s);
    Parser::new_for_script(l.clone(), Lexer::new_from_str(l, s), false)
        .parse_script()
        .map(normalize)
}
/// Parses `s` as a module and normalizes the resulting AST for comparison.
fn parse_module(file_name: &str, s: &str) -> PResult<Module> {
    let l = logger(file_name, s);
    Parser::new_for_module(l.clone(), Lexer::new_from_str(l, s))
        .parse_module()
        .map(normalize)
}
/// Repeatedly applies `Normalizer` until it reports a fixed point, so that
/// ASTs from equivalent-but-differently-written sources compare equal.
fn normalize<T>(mut t: T) -> T
where
    Normalizer: Folder<T>,
{
    loop {
        let mut n = Normalizer {
            did_something: false,
        };
        t = n.fold(t);
        if !n.did_something {
            return t;
        }
    }
}
/// AST folder that erases representation-only differences (spans, implicit
/// `new` args, nested sequences, property-name forms).
struct Normalizer {
    // Set when a pass changed something; `normalize` loops until false.
    did_something: bool,
}
/// Replaces every span with the dummy span so positions never affect
/// equality.
impl Folder<Span> for Normalizer {
    fn fold(&mut self, _: Span) -> Span {
        Span::DUMMY
    }
}
impl Folder<ExprKind> for Normalizer {
    fn fold(&mut self, e: ExprKind) -> ExprKind {
        match e {
            // Strip parentheses (recursion handles nesting in one pass,
            // so `did_something` is not set here).
            ExprKind::Paren(e) => self.fold(e.node),
            // `new X` and `new X()` are equivalent; canonicalize to the
            // explicit empty-argument form.
            ExprKind::New { callee, args: None } => {
                self.did_something = true;
                ExprKind::New {
                    callee: self.fold(callee),
                    args: Some(vec![]),
                }
            }
            // Flatten nested comma sequences: `(a, (b, c))` -> `(a, b, c)`.
            ExprKind::Seq { exprs } => {
                let mut exprs = self.fold(exprs);
                let need_work = exprs.iter().map(|e| &e.node).any(|n| match *n {
                    ExprKind::Seq { .. } => true,
                    _ => false,
                });

                if need_work {
                    self.did_something = true;
                    exprs = exprs.into_iter().fold(vec![], |mut v, e| {
                        match e.node {
                            ExprKind::Seq { exprs } => v.extend(exprs),
                            _ => v.push(e),
                        }
                        v
                    });
                }
                ExprKind::Seq { exprs }
            }
            _ => e.fold_children(self),
        }
    }
}
/// Canonicalizes property names: `{a: 1}`, `{"a": 1}` and `{1: x}` / `{"1": x}`
/// all normalize to the string form.
impl Folder<PropName> for Normalizer {
    fn fold(&mut self, n: PropName) -> PropName {
        match n {
            PropName::Ident(Ident { sym, .. }) => {
                self.did_something = true;
                PropName::Str(String::from(&*sym))
            }
            PropName::Num(num) => {
                self.did_something = true;
                PropName::Str(num.to_string())
            }
            _ => n.fold_children(self),
        }
    }
}
// Entry point for the dynamically-generated test262 suite: collected cases
// are handed to libtest's `test_main` directly.
#[test]
// #[main]
fn main() {
    let args: Vec<_> = env::args().collect();
    let mut tests = Vec::new();
    unit_tests(&mut tests).unwrap();
    test_main(&args, tests, Options::new());
}

View File

@ -0,0 +1,19 @@
[package]
name = "parser_macros"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[lib]
proc-macro = true
[dependencies]
swc_macros_common = { path = "../../macros/common" }
proc-macro2 = "0.2"
[dependencies.syn]
version = "0.12"
features = ["fold"]
[dependencies.quote]
version = "0.4"

View File

@ -0,0 +1,142 @@
use swc_macros_common::prelude::*;
use syn::fold::{self, Fold};
use syn::synom::Synom;
/// Entry point of the `#[parser]` attribute: rewrites the item so that
/// known parser macros receive the method's receiver as a hidden first
/// argument.
pub fn expand(_attr: TokenStream, item: Item) -> Item {
    MyFolder { parser: None }.fold_item(item)
}
/// Syntax-tree folder behind `#[parser]`.
struct MyFolder {
    // Identifier to inject into macros (`self` of the current method);
    // `None` while outside a method with a self receiver.
    parser: Option<Ident>,
}
/// Computes a span covering all tokens produced by `t`, joining the span of
/// the first token with that of the last. Falls back to the call site when
/// `t` produces no tokens or the join fails.
fn get_joinned_span(t: &ToTokens) -> Span {
    let tokens: TokenStream = t.dump().into();

    let mut first = None;
    let mut last = None;
    for tt in tokens {
        if first.is_none() {
            first = Some(tt.span);
        }
        last = Some(tt.span);
    }

    let fallback = Span::call_site();
    first
        .unwrap_or(fallback)
        .join(last.unwrap_or(fallback))
        .unwrap_or(fallback)
}
/// Parses a macro's token stream as a `P`-separated list of `T`.
///
/// Panics (compile error in the proc-macro context) when the tokens do not
/// form such a list.
fn parse_args<T, P>(t: TokenStream) -> Punctuated<T, P>
where
    T: Synom,
    P: Synom,
{
    let buf = ::syn::buffer::TokenBuffer::new(t.into());
    Punctuated::parse_separated(buf.begin())
        .expect("failed parse args")
        .0
}
impl Fold for MyFolder {
    /// Stops `self` injection across `parse_with` boundaries by folding the
    /// call with a fresh folder that has no known receiver.
    fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall {
        match i.method.as_ref() {
            "parse_with" => {
                //TODO
                return fold::fold_expr_method_call(&mut MyFolder { parser: None }, i);
            }
            _ => {}
        }

        fold::fold_expr_method_call(self, i)
    }

    /// Records the method's self receiver (if any) so macros in its body can
    /// have `self` injected.
    fn fold_method_sig(&mut self, i: MethodSig) -> MethodSig {
        self.parser = i.decl
            .inputs
            .first()
            .map(Pair::into_value)
            .cloned()
            .and_then(|arg| match arg {
                FnArg::SelfRef(ArgSelfRef {
                    self_token,
                    mutability: Some(..),
                    ..
                })
                | FnArg::SelfValue(ArgSelf { self_token, .. }) => {
                    Some(Ident::new("self", self_token.0))
                }
                _ => None,
            });

        i
    }

    fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure {
        if self.parser.is_none() {
            // if we don't know what closure is this, don't do anything.
            i
        } else {
            fold::fold_expr_closure(self, i)
        }
    }

    /// The core rewrite: decides, by macro name, whether to leave a macro
    /// alone, fold only its arguments, or prepend the recorded receiver.
    fn fold_macro(&mut self, i: Macro) -> Macro {
        let name = i.path.dump().to_string();
        let span = get_joinned_span(&i.path);

        match &*name {
            // Macros that take no parser receiver and need no folding.
            "vec" | "unreachable" | "tok" | "js_word" => return i,
            // std formatting/assertion macros: fold the arguments so nested
            // parser macros inside them are still rewritten.
            "println" | "print" | "format" | "assert" | "assert_eq" | "assert_ne"
            | "debug_assert" | "debug_assert_eq" | "debug_assert_ne" => {
                let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
                args = args.into_pairs()
                    .map(|el| el.map_item(|expr| self.fold_expr(expr)))
                    .collect();
                return Macro {
                    tts: args.dump().into(),
                    ..i
                };
            }
            "trace" | "debug" | "info" | "warn" | "error" => return i,
            //TODO
            "unimplemented" => return i,

            // `spanned!` takes a block: fold the block, then pass
            // `self, { ... }` to the macro.
            "spanned" => {
                let block: Block =
                    parse(i.tts.into()).expect("failed to parse input to spanned as a block");
                let block = self.fold_block(block);
                return Macro {
                    tts: TokenStream::from(quote_spanned!(span => self,))
                        .into_iter()
                        .chain(TokenStream::from(block.dump()))
                        .collect(),
                    ..i
                };
            }

            //TODO: Collect expect and give that list to unexpected
            // Parser macros: inject `self` as the first argument (alone when
            // the macro had no arguments, prepended otherwise).
            "assert_and_bump" | "bump" | "cur" | "cur_pos" | "eat" | "eof" | "eat_exact"
            | "expect" | "expect_exact" | "into_spanned" | "is" | "is_one_of" | "peeked_is"
            | "peek" | "peek_ahead" | "last_pos" | "return_if_arrow" | "span" | "syntax_error"
            | "unexpected" => {
                let tts = if i.tts.is_empty() {
                    quote_spanned!(span => self).into()
                } else {
                    let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
                    let args = args.into_pairs()
                        .map(|el| el.map_item(|expr| self.fold_expr(expr)))
                        .map(|arg| arg.dump())
                        .flat_map(|t| TokenStream::from(t));

                    TokenStream::from(quote_spanned!(span => self,))
                        .into_iter()
                        .chain(args)
                        .collect()
                };

                return Macro { tts, ..i };
            }
            _ => {
                // Unknown macro: fail loudly instead of silently producing
                // code that misses `self`.
                unimplemented!("Macro: {:#?}", i);
            }
        }
    }
}

View File

@ -0,0 +1,32 @@
//! Simulates unhygienic macro to make parser's code cleaner.
//! It lives here because it's not a generic purpose macro (at all).
//!
//! This can't be implemented with macro_rules! because
//! rust does not support token munching (destructing `$b:block` into `{ $($t:tt)* }`).
//!
//!
#![feature(proc_macro)]
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate swc_macros_common;
extern crate syn;
use proc_macro::TokenStream;
use swc_macros_common::prelude::*;
mod expand;
/// This attribute macro injects first argument of function (typically `self`)
/// to all **known** macros invocation in a function.
///
///
#[proc_macro_attribute]
pub fn parser(attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse the annotated item, panicking with a readable message if the
    // attribute is applied to something that is not a valid item.
    let item = syn::parse(item).expect("failed to parse input as an item");
    // Rewrite all *known* macro invocations inside the item (see `expand`).
    let item = expand::expand(attr.into(), item);
    // `print` (from swc_macros_common) returns the expanded tokens as the
    // macro output, optionally dumping them for debugging.
    print("parser", item.dump())
}

2
ecmascript/src/lib.rs Normal file
View File

@ -0,0 +1,2 @@
pub extern crate swc_ecma_ast as ast;
pub extern crate swc_ecma_parser as parser;

View File

@ -7,5 +7,4 @@ authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
ast_node = { path = "./ast_node" }
enum_kind = { path = "./enum_kind" }
eq_ignore_span = { path = "./eq_ignore_span" }
enum_kind = { path = "./enum_kind" }

View File

@ -8,14 +8,14 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = { git = "https://github.com/kdy1/rust-pmutil" }
proc-macro2 = { version = "0.1", features = ["unstable"] }
pmutil = "0.1"
proc-macro2 = "0.2"
[dependencies.syn]
git = "https://github.com/dtolnay/syn"
features = ["full", "parsing", "printing", "extra-traits"]
version = "0.12"
features = ["derive", "fold", "parsing", "printing"]
[dependencies.synom]
git = "https://github.com/dtolnay/syn"
[dev-dependencies]
swc_macros = { path = "../" }
swc_common = { path = "../../common" }

209
macros/ast_node/src/fold.rs Normal file
View File

@ -0,0 +1,209 @@
use common::prelude::*;
/// Generates an `impl ::swc_common::fold::FoldWith<__Folder> for Type` whose
/// `fold_children` matches on `self` and rebuilds every variant, folding each
/// field through `__folder` (except fields skipped by `should_skip_field`).
pub fn derive_fold(input: &DeriveInput) -> ItemImpl {
    let mut derive_generics = Derive::new(input);
    // For every field marked `#[fold(bound)]`, add a
    // `FieldType: FoldWith<__Folder>` where-predicate so generic fields work.
    let preds = derive_generics
        .all_generic_fields()
        .into_iter()
        .filter(|f| {
            f.attrs
                .iter()
                .any(|attr| is_attr_name(attr, "fold") && attr.tts.to_string() == "( bound )")
        })
        .map(|f| f.ty.clone())
        .map(normalize_type_for_bound)
        .map(|ty| {
            Quote::new_call_site()
                .quote_with(smart_quote!(
                    Vars { Type: &ty },
                    (Type: ::swc_common::fold::FoldWith<__Folder>)
                ))
                .parse()
        });
    derive_generics.add_where_predicates(preds);
    // One match arm per variant (a struct is treated as a single variant).
    let arms = Binder::new_from(input)
        .variants()
        .into_iter()
        .map(|v| {
            // Qualified path of variant.
            let qual_name = v.qual_path();
            let (pat, bindings) = v.bind("_", None, None);
            // Build `field_name: folded_value` for every bound field.
            let fields: Punctuated<FieldValue, token::Comma> = bindings
                .into_iter()
                .map(|binding| {
                    // This closure will not be called for unit-like struct.
                    // Named fields use the ident; tuple fields use a numeric index.
                    let field_name: Tokens = binding
                        .field()
                        .ident
                        .as_ref()
                        .map(|s| s.dump())
                        .unwrap_or_else(|| {
                            Index {
                                index: binding.idx() as _,
                                span: call_site(),
                            }.dump()
                        });
                    // Skipped fields (primitives/String) are moved through
                    // unchanged; all others are passed to the folder.
                    let value = match should_skip_field(binding.field()) {
                        true => Quote::new_call_site().quote_with(smart_quote!(
                            Vars {
                                binded_field: binding.name(),
                            },
                            { binded_field }
                        )),
                        false => Quote::new_call_site().quote_with(smart_quote!(
                            Vars {
                                FieldType: &binding.field().ty,
                                binded_field: binding.name(),
                            },
                            {
                                ::swc_common::fold::Folder::<FieldType>::fold(
                                    __folder,
                                    binded_field,
                                )
                            }
                        )),
                    };
                    let v = Quote::new_call_site()
                        .quote_with(smart_quote!(
                            Vars { field_name, value },
                            (field_name: value)
                        ))
                        .parse::<FieldValue>();
                    // Forward `#[cfg]` attributes so conditional fields stay conditional.
                    FieldValue {
                        attrs: binding
                            .field()
                            .attrs
                            .iter()
                            .filter(|attr| is_attr_name(attr, "cfg"))
                            .cloned()
                            .collect(),
                        ..v
                    }
                })
                .map(|t| Element::Punctuated(t, call_site()))
                .collect();
            let body = match *v.data() {
                // Handle unit like structs separately
                Fields::Unit => box Quote::new_call_site()
                    .quote_with(smart_quote!(Vars { Name: qual_name }, {
                        {
                            return Name;
                        }
                    }))
                    .parse(),
                _ => box Quote::new_call_site()
                    .quote_with(smart_quote!(
                        Vars {
                            Name: qual_name,
                            fields,
                        },
                        {
                            {
                                return Name { fields };
                            }
                        }
                    ))
                    .parse(),
            };
            Arm {
                body,
                attrs: v.attrs()
                    .iter()
                    .filter(|attr| is_attr_name(attr, "cfg"))
                    .cloned()
                    .collect(),
                pats: vec![Element::End(pat)].into_iter().collect(),
                guard: None,
                rocket_token: call_site(),
                comma: Some(call_site()),
            }
        })
        .collect();
    // `match self { ...arms }`
    let body = Expr::Match(ExprMatch {
        attrs: Default::default(),
        match_token: call_site(),
        brace_token: call_site(),
        expr: box Quote::new_call_site()
            .quote_with(smart_quote!(Vars {}, { self }))
            .parse(),
        arms,
    });
    let item = Quote::new_call_site()
        .quote_with(smart_quote!(
            Vars {
                Type: &input.ident,
                body,
            },
            {
                impl<__Folder> ::swc_common::fold::FoldWith<__Folder> for Type {
                    fn fold_children(self, __folder: &mut __Folder) -> Self {
                        body
                    }
                }
            }
        ))
        .parse();
    // Attach the generics/where-clause collected above to the generated impl.
    let item = derive_generics.append_to(item);
    // println!("Expanded:\n {}\n\n", item.dump());
    item
}
/// Returns `true` for fields whose type is a primitive (or `String`), i.e.
/// fields that contain no AST nodes and therefore need no folding.
///
/// Note: the check is purely textual (on the dumped type tokens), so an
/// aliased or qualified primitive type will not be recognized.
fn should_skip_field(field: &Field) -> bool {
    let ty_str = field.ty.dump().to_string();
    // `matches!` replaces the original match-with-empty-catch-all; same set of
    // types, same result, less ceremony.
    matches!(
        &*ty_str,
        "bool" | "usize" | "u128" | "u64" | "u32" | "u16" | "u8" | "isize" | "i128" | "i64"
            | "i32" | "i16" | "i8" | "f64" | "f32" | "String"
    )
}
/// Strips single-argument `Box<_>` / `Option<_>` / `Vec<_>` wrappers from a
/// type so that the innermost path type is used when generating a `where`
/// bound (e.g. `Vec<Option<Box<Expr>>>` normalizes to `Expr`).
fn normalize_type_for_bound(ty: Type) -> Type {
    use syn::fold::{self, Fold};

    struct Norm;
    impl Fold for Norm {
        fn fold_path(&mut self, path: Path) -> Path {
            if path.segments.len() == 1 {
                let seg = &path.segments[0];
                // Anything other than the three known wrappers is already the
                // innermost type.
                if seg.ident != "Box" && seg.ident != "Option" && seg.ident != "Vec" {
                    return path.clone();
                }
                // Wrapper with exactly one type argument: recurse into it if
                // the argument is itself a plain path type.
                if let PathArguments::AngleBracketed(ref args) = seg.arguments {
                    if args.args.len() == 1 {
                        if let GenericArgument::Type(ref ty) =
                            *args.args.last().unwrap().into_value()
                        {
                            match *ty {
                                Type::Path(TypePath { ref path, .. }) => {
                                    return self.fold_path(path.clone())
                                }
                                _ => {}
                            }
                        }
                    }
                }
            }
            fold::fold_path(self, path)
        }
    }

    // Fixed: the original bound the result to a local (`let out = …; out`)
    // for no reason — return the expression directly.
    Norm.fold_type(ty)
}

View File

@ -4,40 +4,35 @@
extern crate pmutil;
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate swc_macros_common as common;
extern crate syn;
use self::fold::derive_fold;
use common::prelude::*;
#[proc_macro_derive(AstNode)]
mod fold;
#[proc_macro_derive(AstNode, attributes(fold))]
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse::<DeriveInput>(input).expect("failed to parse input as DeriveInput");
let type_name = &input.ident;
let item: Item = Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
CONST_NAME: type_name.new_ident_with(|n| format!("_IMPL_AST_NODE_FOR_{}", n)),
Type: type_name,
},
{
#[allow(non_upper_case_globals)]
const CONST_NAME: () = {
extern crate swc_common as _swc_common;
impl _swc_common::AstNode for Type {}
()
};
}
))
.parse();
let mut tokens = Tokens::new();
derive_fold(&input).to_tokens(&mut tokens);
print("derive(AstNode)", item.into_tokens())
let item = Quote::new_call_site()
.quote_with(smart_quote!(Vars { Type: type_name }, {
impl ::swc_common::AstNode for Type {}
}))
.parse::<ItemImpl>()
.with_generics(input.generics);
item.to_tokens(&mut tokens);
print("derive(AstNode)", tokens)
}
/// Alias for
///
/// `#[derive(Clone, Debug, Eq, PartialEq, Hash, EqIgnoreSpan, AstNode)]`
/// `#[derive(Clone, Debug, PartialEq, AstNode)]`
#[proc_macro_attribute]
pub fn ast_node(
_attr: proc_macro::TokenStream,
@ -45,12 +40,11 @@ pub fn ast_node(
) -> proc_macro::TokenStream {
let item: Item = syn::parse(s).expect("failed to parse tokens as an item");
// With proc_macro feature enabled, only attributes for first derive works.
// https://github.com/rust-lang/rust/issues/44925
let tokens = pmutil::Quote::new_call_site().quote_with(smart_quote!(Vars { item }, {
#[derive(
Clone, Debug, Eq, PartialEq, Hash,
::swc_macros::EqIgnoreSpan,
::swc_macros::AstNode
)]
#[derive(::swc_macros::AstNode)]
#[derive(Clone, Debug, PartialEq)]
item
}));

View File

@ -0,0 +1,14 @@
#![feature(specialization, proc_macro)]
extern crate swc_common;
extern crate swc_macros;
use swc_macros::ast_node;
// Compile test: `#[ast_node]` on a struct with a named `String` field.
#[ast_node]
// See https://github.com/rust-lang/rust/issues/44925
pub struct Class {
    pub s: String,
}
// Compile test: `#[ast_node]` on a tuple struct with primitive fields.
#[ast_node]
pub struct Tuple(usize, usize);

View File

@ -0,0 +1,12 @@
#![feature(specialization, proc_macro)]
extern crate swc_common;
extern crate swc_macros;
use swc_macros::ast_node;
// Compile test: `#[ast_node]` must accept an empty struct…
#[ast_node]
pub struct Struct {}
// …and an empty enum.
#[ast_node]
pub enum Enum {
}

View File

@ -0,0 +1,55 @@
#![feature(specialization, proc_macro)]
extern crate swc_common;
extern crate swc_macros;
use swc_common::fold::{FoldWith, Folder};
use swc_macros::ast_node;
// Marker trait used below to assert, at compile time, that a `Folder<T>`
// impl exists for the given `T`.
pub trait AssertFolder<T>: Folder<T> {}
// check for trait bound
pub struct LitFolder;
impl Folder<Lit> for LitFolder {
    // Folds every literal to `Lit::A`, regardless of input.
    fn fold(&mut self, _: Lit) -> Lit {
        Lit::A
    }
}
// The derive must propagate folding through `Expr` and `ExprKind`, so a
// `Folder<Lit>` should also satisfy these bounds.
impl AssertFolder<Expr> for LitFolder {}
impl AssertFolder<ExprKind> for LitFolder {}
#[ast_node]
pub struct Expr {
    pub node: ExprKind,
    /// This field should be skipped.
    pub bool_field: bool,
    /// Ensure that #[fold(ignore)] works.
    #[fold(ignore)]
    pub ignored: PanicOnFold,
    /* /// Ensure that #[cfg] works.
     * #[cfg(feature = "__never_exists")]
     * pub never_exists: Lit, */
}
// Sentinel type: panics if the derive ever tries to fold an ignored field.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PanicOnFold;
impl<F> FoldWith<F> for PanicOnFold {
    fn fold_children(self, _: &mut F) -> Self {
        unreachable!("this should not be called")
    }
}
#[ast_node]
pub enum ExprKind {
    // NOTE(review): variant name has a typo ("Boud"); consider RecursiveBound.
    RecursiveBoud(Box<Expr>),
    Rec2(Vec<Option<Box<Expr>>>),
    Lit(Lit),
}
#[ast_node]
pub enum Lit {
    A,
    B,
}

View File

@ -0,0 +1,35 @@
//! Ensures that #[derive(AstNode)] works with generic types.
#![feature(specialization, proc_macro)]
extern crate swc_common;
extern crate swc_macros;
use std::fmt::Debug;
use swc_common::AstNode;
use swc_macros::ast_node;
// Trait with an associated type, used to exercise `#[fold(bound)]` on fields
// whose type depends on a generic parameter.
pub trait Ast: Copy + Eq + Debug {
    type CustomExpr: AstNode;
}
#[ast_node]
pub struct Stmt<A: Ast> {
    // `#[fold(bound)]` asks the derive to add a where-bound for this field's type.
    #[fold(bound)]
    pub expr: Expr<A>,
}
#[ast_node]
pub struct Expr<A: Ast> {
    #[fold(bound)]
    pub node: ExprKind<A>,
}
#[ast_node]
pub enum ExprKind<A: Ast> {
    // Associated-type field: only foldable via the generated bound.
    Custom(
        #[fold(bound)]
        A::CustomExpr,
    ),
    /// Recursive
    Stmt(Box<Stmt<A>>),
}

View File

@ -4,16 +4,10 @@ version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
pmutil = { git = "https://github.com/kdy1/rust-pmutil" }
proc-macro2 = { version = "0.1", features = ["unstable"] }
pmutil = "0.1"
proc-macro2 = "0.2"
quote = "0.4"
[dependencies.syn]
git = "https://github.com/dtolnay/syn"
features = ["full", "parsing", "printing", "extra-traits"]
[dependencies.synom]
git = "https://github.com/dtolnay/syn"
[dependencies.quote]
git = "https://github.com/dtolnay/quote"
version = "0.12"
features = ["derive", "visit", "parsing", "printing", "extra-traits"]

290
macros/common/src/binder.rs Normal file
View File

@ -0,0 +1,290 @@
//! # Example
//!
//! `_binded_a`, `_binded_b` and `_binded_0` in below example are
//! `BindedField`.
//!
//! ```rust
//! struct S {
//! a: u8,
//! b: u16,
//! }
//! let s = S { a: 0, b: 0, };
//! match s {
//! S { a: _binded_a, b: _binded_b } => {}
//! }
//! enum E {
//! V1 { a: u8 },
//! V2(u16),
//! V3,
//! }
//! let e = E::V1{ a: 0 };
//! match e {
//! E::V1 { a: _binded_a } => {}
//! E::V2(_binded_0) => {}
//! E::V3 => {}
//! }
//! ```
//!
//!
//! -----
//!
//! Adopted from `synstructure`.
use is_attr_name;
use pmutil::prelude::*;
use proc_macro2::Span;
use quote::{ToTokens, Tokens};
use syn::*;
use syn::punctuated::Pair;
use syn::token::{Mut, Ref};
use syn_ext::PairExt;
/// Used to bind whole struct or enum.
#[derive(Debug, Clone)]
pub struct Binder<'a> {
    // Name of the type being derived for.
    ident: &'a Ident,
    // Struct/enum/union body of the type.
    body: &'a Data,
    // Attributes of the type itself (not of its variants).
    attrs: &'a [Attribute],
}
impl<'a> Binder<'a> {
    /// - `attrs`: Attributes of the type.
    pub const fn new(ident: &'a Ident, body: &'a Data, attrs: &'a [Attribute]) -> Self {
        Binder { ident, body, attrs }
    }
    /// Convenience constructor borrowing everything from a `DeriveInput`.
    pub fn new_from(input: &'a DeriveInput) -> Self {
        Self::new(&input.ident, &input.data, &input.attrs)
    }
    /// Returns one `VariantBinder` per enum variant; a struct is treated as a
    /// single variant with `enum_name == None`. Panics for unions.
    pub fn variants(&self) -> Vec<VariantBinder<'a>> {
        match *self.body {
            Data::Enum(DataEnum { ref variants, .. }) => {
                let enum_name = &self.ident;
                variants
                    .iter()
                    .map(|v| VariantBinder::new(Some(enum_name), &v.ident, &v.fields, &v.attrs))
                    .collect()
            }
            Data::Struct(DataStruct { ref fields, .. }) => {
                vec![VariantBinder::new(None, &self.ident, fields, self.attrs)]
            }
            Data::Union(_) => unimplemented!("Binder for union type"),
        }
    }
}
/// Variant.
#[derive(Debug, Clone)]
pub struct VariantBinder<'a> {
    /// None for struct.
    enum_name: Option<&'a Ident>,
    /// Name of variant.
    name: &'a Ident,
    data: &'a Fields,
    attrs: &'a [Attribute],
}
impl<'a> VariantBinder<'a> {
    pub const fn new(
        enum_name: Option<&'a Ident>,
        name: &'a Ident,
        data: &'a Fields,
        attrs: &'a [Attribute],
    ) -> Self {
        VariantBinder {
            enum_name,
            name,
            data,
            attrs,
        }
    }
    pub const fn variant_name(&self) -> &Ident {
        self.name
    }
    pub const fn data(&self) -> &Fields {
        self.data
    }
    pub const fn attrs(&self) -> &[Attribute] {
        self.attrs
    }
    /// `EnumName::VariantName` for enum, and `StructName` for struct.
    pub fn qual_path(&self) -> Path {
        match self.enum_name {
            Some(enum_name) => Quote::new_call_site()
                .quote_with(smart_quote!(
                    Vars {
                        EnumName: enum_name,
                        VariantName: self.name,
                    },
                    { EnumName::VariantName }
                ))
                .parse(),
            None => self.name.clone().into(),
        }
    }
    /// Builds a match pattern for this variant that binds every field to a
    /// fresh ident (`{prefix}{field_name}` or `{prefix}{index}`), and returns
    /// the pattern together with one `BindedField` per bound field.
    ///
    /// - `prefix`: prefix of field binding.
    /// - `by_ref` / `mutability`: forwarded to each field's `PatIdent`; when
    ///   `by_ref` is set the whole pattern is additionally wrapped in `&`.
    pub fn bind(
        &self,
        prefix: &str,
        by_ref: Option<Ref>,
        mutability: Option<Mut>,
    ) -> (Pat, Vec<BindedField<'a>>) {
        let path = self.qual_path();
        let (pat, bindings) = match self.data {
            &Fields::Unit => {
                // EnumName::VariantName
                let pat = Pat::Path(PatPath { qself: None, path });
                // Unit struct does not have any field to bind
                (pat, vec![])
            }
            &Fields::Named(FieldsNamed {
                named: ref fields,
                brace_token,
            }) => {
                let mut bindings = vec![];
                let fields = fields
                    .pairs()
                    .map(|e| {
                        // Clone the punctuation so the pairs can be consumed by value.
                        let (t, p) = e.into_tuple();
                        Pair::new(t, p.cloned())
                    })
                    .enumerate()
                    .map(|(idx, f)| {
                        f.map_item(|f| {
                            let ident = f.ident
                                .expect("field of struct-like variants should have name");
                            let binded_ident = ident.new_ident_with(|s| format!("{}{}", prefix, s));
                            bindings.push(BindedField {
                                idx,
                                binded_ident: binded_ident.clone(),
                                field: f,
                            });
                            FieldPat {
                                // Forward `#[cfg]` so conditional fields stay conditional.
                                attrs: f.attrs
                                    .iter()
                                    .filter(|attr| is_attr_name(attr, "cfg"))
                                    .cloned()
                                    .collect(),
                                colon_token: f.colon_token,
                                member: Member::Named(ident),
                                pat: box Pat::Ident(PatIdent {
                                    by_ref,
                                    mutability,
                                    ident: binded_ident,
                                    subpat: None,
                                }),
                            }
                        })
                    })
                    .collect();
                // EnumName::VariantName { fields }
                let pat = Pat::Struct(PatStruct {
                    path,
                    fields,
                    brace_token,
                    dot2_token: None,
                });
                (pat, bindings)
            }
            &Fields::Unnamed(FieldsUnnamed {
                unnamed: ref fields,
                paren_token,
            }) => {
                // TODO
                let mut bindings = vec![];
                let pats = fields
                    .pairs()
                    .map(|e| {
                        let (t, p) = e.into_tuple();
                        Pair::new(t, p.cloned())
                    })
                    .enumerate()
                    .map(|(idx, f)| {
                        f.map_item(|f| {
                            // Tuple fields have no name; bind by index.
                            let binded_ident =
                                Span::call_site().new_ident(format!("{}{}", prefix, idx));
                            bindings.push(BindedField {
                                idx,
                                binded_ident: binded_ident.clone(),
                                field: f,
                            });
                            Pat::Ident(PatIdent {
                                by_ref,
                                mutability,
                                ident: binded_ident,
                                subpat: None,
                            })
                        })
                    })
                    .collect();
                // EnumName::VariantName ( fields )
                let pat = Pat::TupleStruct(PatTupleStruct {
                    path,
                    pat: PatTuple {
                        dot2_token: None,
                        front: pats,
                        back: Default::default(),
                        paren_token,
                        comma_token: None,
                    },
                });
                (pat, bindings)
            }
        };
        // if we don't need to move fields, we should match on reference to make tuple
        // work.
        let pat = match by_ref {
            Some(ref_token) => Pat::Ref(PatRef {
                pat: box pat,
                and_token: ref_token.0.as_token(),
                mutability,
            }),
            None => pat,
        };
        (pat, bindings)
    }
}
/// Binded field. Note that this struct acts like a binded variable for
/// `quote!`.
///
/// Created by `VariantBinder::bind`; `ToTokens` emits the binding ident, so a
/// `BindedField` can be interpolated directly into quoted code.
#[derive(Debug, Clone)]
pub struct BindedField<'a> {
    // Fresh ident the field was bound to in the match pattern.
    binded_ident: Ident,
    // Zero-based position of the field within its variant.
    idx: usize,
    field: &'a Field,
}
impl<'a> BindedField<'a> {
    pub const fn idx(&self) -> usize {
        self.idx
    }
    /// Name of field binding.
    pub const fn name(&self) -> &Ident {
        &self.binded_ident
    }
    pub const fn field(&self) -> &Field {
        self.field
    }
}
impl<'a> ToTokens for BindedField<'a> {
    fn to_tokens(&self, t: &mut Tokens) {
        self.binded_ident.to_tokens(t)
    }
}

View File

@ -0,0 +1,115 @@
use super::*;
use std::collections::BTreeSet;
use syn::visit::{self, Visit};
impl<'a> Derive<'a> {
    /// Returns every field of the input whose type mentions one of the
    /// input's own type parameters (e.g. `T`, `A::Assoc`), skipping
    /// `PhantomData` and macro invocations.
    pub fn all_generic_fields(&self) -> Vec<&'a Field> {
        // Marks `is_generic` when a path starts with one of `params`.
        struct TypeVisitor<'a> {
            params: &'a BTreeSet<Ident>,
            is_generic: bool,
        }
        impl<'a, 'b> Visit<'a> for TypeVisitor<'b> {
            fn visit_path(&mut self, path: &Path) {
                if let Some(seg) = path.segments.last() {
                    if seg.into_value().ident == "PhantomData" {
                        // Hardcoded exception.
                        // This assumes name of the associated type is not PhantomData.
                        return;
                    }
                }
                if path.leading_colon.is_none() {
                    if let Some(seg) = path.segments.first() {
                        let id = seg.value().ident;
                        if self.params.contains(&id) {
                            self.is_generic = true;
                        }
                    }
                }
                visit::visit_path(self, path)
            }
            // Do not descend into macro token trees.
            fn visit_macro(&mut self, _: &Macro) {}
        }
        // Runs `TypeVisitor` over each field's type and collects the hits.
        struct FieldVisitor<'a> {
            /// Type parameters defined on type.
            params: BTreeSet<Ident>,
            fields: Vec<&'a Field>,
        }
        impl<'a: 'b, 'b> Visit<'a> for FieldVisitor<'b> {
            fn visit_field(&mut self, field: &'a Field) {
                let mut vis = TypeVisitor {
                    params: &self.params,
                    is_generic: false,
                };
                vis.visit_type(&field.ty);
                if vis.is_generic {
                    self.fields.push(field);
                }
            }
        }
        let mut vis = FieldVisitor {
            params: self.input
                .generics
                .params
                .iter()
                .filter_map(|p| match *p {
                    GenericParam::Type(TypeParam { ref ident, .. }) => Some(ident.clone()),
                    _ => None,
                })
                .collect(),
            fields: vec![],
        };
        vis.visit_derive_input(self.input);
        vis.fields
    }
    /// Appends `preds` to the output impl's where-clause, creating the clause
    /// if it does not exist yet and fixing up trailing punctuation.
    pub fn add_where_predicates<I>(&mut self, preds: I)
    where
        I: IntoIterator<Item = WherePredicate>,
    {
        let preds = preds.into_iter().map(|t| Pair::Punctuated(t, call_site()));
        match self.out.generics.where_clause {
            Some(WhereClause {
                ref mut predicates, ..
            }) => {
                if !predicates.empty_or_trailing() {
                    predicates.push_punct(call_site());
                }
                predicates.extend(preds)
            }
            None => {
                self.out.generics.where_clause = Some(WhereClause {
                    where_token: call_site(),
                    predicates: preds.collect(),
                })
            }
        }
    }
    /// Add `Self: #trait_`.
    pub fn bound_self(&mut self, trait_: Path) {
        let self_ty: Type = parse(quote!(Self).into()).unwrap();
        let bound = WherePredicate::Type(PredicateType {
            lifetimes: None,
            bounded_ty: self_ty,
            colon_token: Default::default(),
            // `Trait` in `Self: Trait`
            bounds: iter::once(Pair::End(TypeParamBound::Trait(TraitBound {
                modifier: TraitBoundModifier::None,
                lifetimes: None,
                path: trait_,
            }))).collect(),
        });
        self.add_where_predicates(iter::once(bound))
    }
}

View File

@ -0,0 +1,108 @@
use call_site;
use pmutil::ToTokensExt;
use quote::{ToTokens, Tokens};
use std::iter;
use syn::*;
use syn::punctuated::Pair;
mod generics;
/// Generics of derived impl item.
///
/// Accumulates generics/bounds for a derive and merges them into a final
/// `ItemImpl` via `append_to`.
#[derive(Debug, Clone)]
pub struct Derive<'a> {
    input: &'a DeriveInput,
    // Skeleton impl block being built up; `trait_` stays `None` until `append_to`.
    out: ItemImpl,
}
impl<'a> Derive<'a> {
    pub fn new(input: &'a DeriveInput) -> Self {
        let (generics, self_ty) = {
            // Generics for impl cannot have default.
            let params = input
                .generics
                .params
                .clone()
                .into_pairs()
                .map(|mut pair| {
                    match *pair.value_mut() {
                        GenericParam::Type(ref mut t) => {
                            // Strip `= Default` from each type parameter.
                            t.eq_token = None;
                            t.default = None;
                        }
                        _ => {}
                    }
                    pair
                })
                .collect();
            let generics = Generics {
                params,
                gt_token: input.generics.gt_token,
                lt_token: input.generics.lt_token,
                where_clause: input.generics.where_clause.clone(),
            };
            // Handle generics declared on the type: build `Name<T, ...>` by
            // printing the ident + type generics and re-parsing as a Type.
            let ty: Box<Type> = {
                let (_, ty_generics, _) = input.generics.split_for_impl();
                let mut t = Tokens::new();
                input.ident.to_tokens(&mut t);
                ty_generics.to_tokens(&mut t);
                box parse(t.dump().into()).unwrap_or_else(|err| {
                    panic!("failed to parse type: {}\nType: {}", err, t.dump())
                })
            };
            (generics, ty)
        };
        Derive {
            input,
            out: ItemImpl {
                attrs: vec![],
                impl_token: call_site(),
                brace_token: call_site(),
                defaultness: None,
                unsafety: None,
                generics,
                trait_: None,
                self_ty,
                items: Default::default(),
            },
        }
    }
    /// Set `defaultness`
    pub fn defaultness(&mut self, defaultness: Option<token::Default>) {
        self.out.defaultness = defaultness;
    }
    /// Set `unsafety`
    pub fn unsafety(&mut self, unsafety: Option<token::Unsafe>) {
        self.out.unsafety = unsafety;
    }
    pub fn input(&self) -> &DeriveInput {
        self.input
    }
    /// Merges `item` (trait path, attrs, items, extra generics) into the
    /// accumulated impl and returns the result. Panics if a trait was already
    /// set on the accumulator.
    pub fn append_to(mut self, item: ItemImpl) -> ItemImpl {
        assert_eq!(self.out.trait_, None);
        if !self.out.generics.params.empty_or_trailing() {
            self.out.generics.params.push_punct(call_site());
        }
        self.out
            .generics
            .params
            .extend(item.generics.params.into_pairs());
        self.out.trait_ = item.trait_;
        self.out.attrs.extend(item.attrs);
        self.out.items.extend(item.items);
        self.out
    }
}

View File

@ -1,15 +1,26 @@
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(const_fn)]
#[macro_use]
extern crate pmutil;
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate syn;
use pmutil::SpanExt;
use pmutil::synom_ext::FromSpan;
use proc_macro2::Span;
use syn::*;
pub mod derive;
pub mod prelude;
mod syn_ext;
pub mod binder;
pub fn call_site<T: FromSpan>() -> T {
FromSpan::from_span(Span::call_site())
Span::call_site().as_token()
}
/// `attr` - tokens inside `#[]`. e.g. `derive(EqIgnoreSpan)`, ast_node
@ -30,6 +41,24 @@ pub fn print<T: Into<proc_macro2::TokenStream>>(
tokens.into()
}
pub fn is_attr_name(attr: &Attribute, name: &str) -> bool {
match *attr {
Attribute {
path:
Path {
leading_colon: None,
ref segments,
},
is_sugared_doc: false,
..
} if segments.len() == 1 =>
{
segments.first().unwrap().into_value().ident == name
}
_ => false,
}
}
/// fail! is a panic! with location reporting.
#[macro_export]
macro_rules! fail {

View File

@ -1,8 +1,10 @@
pub use super::{call_site, is_attr_name, print};
pub use super::binder::{Binder, VariantBinder};
pub use super::derive::Derive;
pub use super::syn_ext::{ItemImplExt, PairExt};
pub use pmutil::prelude::*;
pub use proc_macro2::{Delimiter, Literal, Span, TokenNode, TokenStream, TokenTree};
pub use quote::{ToTokens, Tokens};
pub use super::{call_site, print};
pub use proc_macro2::{Delimiter, Span, TokenNode, TokenStream, TokenTree};
pub use syn::*;
pub use syn::Span as SynSpan;
pub use syn::delimited::{Delimited, Element};
pub use syn::punctuated::{Pair, Punctuated};
pub use syn::punctuated::Pair as Element;

View File

@ -0,0 +1,121 @@
use call_site;
use pmutil::prelude::*;
use syn::*;
use syn::punctuated::Pair;
/// Extension trait for `ItemImpl` (impl block).
pub trait ItemImplExt {
    /// Instead of
    ///
    /// ```rust,ignore
    /// let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    ///
    /// let item: Item = Quote::new_call_site()
    ///     .quote_with(smart_quote!(
    ///         Vars {
    ///             Type: type_name,
    ///             impl_generics,
    ///             ty_generics,
    ///             where_clause,
    ///         },
    ///         {
    ///             impl impl_generics ::swc_common::AstNode for Type ty_generics
    ///                  where_clause {}
    ///         }
    ///     ))
    ///     .parse();
    /// ```
    ///
    /// You can use this like
    ///
    /// ```rust,ignore
    // let item = Quote::new_call_site()
    ///     .quote_with(smart_quote!(Vars { Type: type_name }, {
    ///         impl ::swc_common::AstNode for Type {}
    ///     }))
    ///     .parse::<ItemImpl>()
    ///     .with_generics(input.generics);
    /// ```
    ///
    fn with_generics(self, generics: Generics) -> Self;
}
impl ItemImplExt for ItemImpl {
    fn with_generics(mut self, mut generics: Generics) -> Self {
        // TODO: Check conflicting name
        // `generics` comes from the type definition; `self.generics` holds
        // whatever the proc-macro already added. Both must end up on the impl.
        let need_new_punct = !generics.params.empty_or_trailing();
        if need_new_punct {
            generics.params.push_punct(call_site());
        }
        // Respan
        match generics.lt_token {
            Some(t) => self.generics.lt_token = Some(t),
            None => {}
        }
        match generics.gt_token {
            Some(t) => self.generics.gt_token = Some(t),
            None => {}
        }
        let ty = self.self_ty;
        // Handle generics defined on struct, enum, or union:
        // print a skeleton impl with the split generics and re-parse it.
        let mut item: ItemImpl = {
            let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
            let item = if let Some((ref polarity, ref path, ref for_token)) = self.trait_ {
                quote!{
                    impl #impl_generics #polarity #path #for_token #ty #ty_generics #where_clause {}
                }
            } else {
                quote!{
                    impl #impl_generics #ty #ty_generics #where_clause {}
                }
            };
            parse(item.dump().into())
                .unwrap_or_else(|err| panic!("with_generics failed: {}\n{}", err, item.dump()))
        };
        // Handle generics added by proc-macro.
        item.generics
            .params
            .extend(self.generics.params.into_pairs());
        match self.generics.where_clause {
            Some(WhereClause {
                ref mut predicates, ..
            }) => predicates.extend(
                generics
                    .where_clause
                    .into_iter()
                    .flat_map(|wc| wc.predicates.into_pairs()),
            ),
            ref mut opt @ None => *opt = generics.where_clause,
        }
        // Keep everything else (attrs, body, tokens) from the original impl.
        ItemImpl {
            attrs: self.attrs,
            defaultness: self.defaultness,
            unsafety: self.unsafety,
            impl_token: self.impl_token,
            brace_token: self.brace_token,
            items: self.items,
            ..item
        }
    }
}
/// Extension for `Pair`: map the item while leaving any punctuation intact.
pub trait PairExt<T, P>: Sized + Into<Pair<T, P>> {
    fn map_item<F, NewItem>(self, op: F) -> Pair<NewItem, P>
    where
        F: FnOnce(T) -> NewItem,
    {
        let pair: Pair<T, P> = self.into();
        match pair {
            // Trailing element: no punctuation to carry over.
            Pair::End(item) => Pair::End(op(item)),
            // Interior element: keep its punctuation token untouched.
            Pair::Punctuated(item, punct) => Pair::Punctuated(op(item), punct),
        }
    }
}
impl<T, P> PairExt<T, P> for Pair<T, P> {}

View File

@ -8,14 +8,11 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = { git = "https://github.com/kdy1/rust-pmutil" }
proc-macro2 = { version = "0.1", features = ["unstable"] }
pmutil = "0.1"
proc-macro2 = "0.2"
[dependencies.syn]
git = "https://github.com/dtolnay/syn"
version = "0.12"
features = ["full", "parsing", "printing", "extra-traits"]
[dependencies.synom]
git = "https://github.com/dtolnay/syn"

View File

@ -1,6 +1,6 @@
use common::prelude::*;
use input::*;
use util::{is_attr_name, is_bool};
use util::is_bool;
pub fn expand(
Input {
@ -11,6 +11,36 @@ pub fn expand(
vis,
}: Input,
) -> Item {
// verify variant attributes.
{
for v in &variants {
if v.attrs.has_delegate {
match v.data {
Fields::Named(FieldsNamed {
named: ref fields, ..
})
| Fields::Unnamed(FieldsUnnamed {
unnamed: ref fields,
..
}) if fields.len() == 1 => {}
_ => panic!(
"currently #[kind(delegate)] can be applied to variant with only one field"
),
}
}
for value in &v.attrs.fn_values {
let used = attrs
.fns
.iter()
.map(|f| f.name)
.any(|fn_name| value.fn_name == fn_name || value.fn_name == "delegate");
if !used {
panic!("Unknown function `{}` on variant {}", value.fn_name, v.name)
}
}
}
}
let items = attrs
.fns
.into_iter()
@ -21,24 +51,21 @@ pub fn expand(
t
});
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
impl_generics,
name,
ty_generics,
where_clause,
Type: name,
items,
},
{
impl impl_generics name ty_generics where_clause {
impl Type {
items
}
}
))
.parse()
.parse::<ItemImpl>()
.with_generics(generics)
.into()
}
impl FnDef {
@ -51,92 +78,85 @@ impl FnDef {
let name_span = name.span;
let arms = variants
.iter()
.map(|v| -> Arm {
// Pattern for this variant.
let pat = match v.data {
VariantData::Struct(ref _fields, _) => Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
EnumName: enum_name,
VariantName: v.name,
},
{ &EnumName::VariantName {..} }
))
.parse::<Pat>(),
VariantData::Tuple(ref _fields, _) => Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
EnumName: enum_name,
VariantName: v.name,
},
{ &EnumName::VariantName(..) }
))
.parse::<Pat>(),
VariantData::Unit => Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
EnumName: enum_name,
VariantName: v.name,
},
{ &EnumName::VariantName }
))
.parse::<Pat>(),
};
let arms =
variants
.iter()
.map(|v| -> Arm {
// Bind this variant.
let (pat, mut fields) =
VariantBinder::new(Some(enum_name), &v.name, &v.data, &v.attrs.extras)
.bind("_", Some(call_site()), None);
let body = {
let value = match v.attrs
.fn_values
.iter()
.find(|fn_val| fn_val.fn_name == name)
.map(|attr| attr.value.clone())
{
Some(Some(value)) => Some(value),
let body = {
let value = match v.attrs
.fn_values
.iter()
.find(|fn_val| fn_val.fn_name == name)
.map(|attr| attr.value.clone())
{
Some(Some(value)) => Some(value),
// if return type is bool and attribute is specified, it return true.
Some(None) if is_bool(&return_type) => Some(
ExprKind::Lit(Lit {
value: LitKind::Bool(true),
span: SynSpan(Span::call_site()),
}).into(),
),
_ => None,
// not specified, but has `#[kind(delegate)]`
None if v.attrs.has_delegate => {
assert_eq!(fields.len(), 1);
let field = fields.remove(0);
Some(
Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
field,
method: name,
},
{ field.method() }
))
.parse(),
)
}
// if return type is bool and attribute is specified, value is true.
Some(None) if is_bool(&return_type) => Some(Expr::Lit(ExprLit {
attrs: Default::default(),
lit: Lit::Bool(LitBool {
value: true,
span: Span::call_site(),
}),
})),
_ => None,
};
value
.or_else(|| default_value.clone())
.map(Box::new)
.unwrap_or_else(|| {
panic!(
"value of {fn_name} for {variant} is not specified.",
fn_name = name,
variant = v.name
);
})
};
value
.or_else(|| default_value.clone())
.map(Box::new)
.unwrap_or_else(|| {
panic!(
"value of {fn_name} for {variant} is not specified.",
fn_name = name,
variant = v.name
);
})
};
Arm {
pats: vec![Element::End(pat)].into_iter().collect(),
body,
Arm {
pats: vec![pat].into(),
body,
// Forward cfg attributes.
attrs: v.attrs
.extras
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
rocket_token: call_site(),
comma: Some(call_site()),
guard: None,
if_token: None,
}
})
.collect();
// Forward cfg attributes.
attrs: v.attrs
.extras
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
rocket_token: call_site(),
comma: Some(call_site()),
guard: None,
}
})
.collect();
// match self {}
let match_expr = ExprKind::Match(ExprMatch {
let match_expr = Expr::Match(ExprMatch {
attrs: Default::default(),
match_token: call_site(),
brace_token: call_site(),
@ -146,12 +166,12 @@ impl FnDef {
.into(),
arms,
}).into();
});
ImplItemMethod {
sig: MethodSig {
constness: Constness::NotConst,
unsafety: Unsafety::Normal,
constness: None,
unsafety: None,
abi: None,
ident: name,
// fn (&self) -> ReturnType
@ -160,30 +180,30 @@ impl FnDef {
paren_token: name.span.as_token(),
inputs: vec![
// TODO
FnArg::SelfRef(ArgSelfRef {
Element::End(FnArg::SelfRef(ArgSelfRef {
and_token: name_span.as_token(),
self_token: name_span.as_token(),
lifetime: None,
mutbl: Mutability::Immutable,
}),
].into(),
output: ReturnType::Type(return_type, name_span.as_token()),
mutability: None,
})),
].into_iter()
.collect(),
output: ReturnType::Type(name_span.as_token(), box return_type),
generics: Default::default(),
variadic: false,
dot_tokens: None,
variadic: None,
},
},
block: Block {
brace_token: call_site(),
stmts: vec![Stmt::Expr(Box::new(match_expr))],
stmts: vec![Stmt::Expr(match_expr)],
},
// TODO
vis,
attrs: Default::default(),
defaultness: Defaultness::Final,
defaultness: None,
}
}
}

View File

@ -37,7 +37,7 @@ pub struct EnumVar {
/// Name of variant.
pub name: Ident,
pub attrs: VariantAttrs,
pub data: VariantData,
pub data: Fields,
}
/// Parsed attributes.
@ -45,6 +45,8 @@ pub struct EnumVar {
pub struct VariantAttrs {
pub fn_values: Vec<VariantAttr>,
pub extras: Vec<Attribute>,
/// Does this variant has `#[kind(delegate)]`?
pub has_delegate: bool,
}
#[derive(Debug)]

View File

@ -135,16 +135,15 @@
//!
//!
//!
#![feature(box_syntax)]
#[macro_use]
extern crate pmutil;
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate swc_macros_common as common;
extern crate syn;
#[macro_use]
extern crate synom;
extern crate syn;
use common::prelude::*;

View File

@ -2,8 +2,8 @@ use common::prelude::*;
use input::*;
use std::fmt::Display;
use std::ops::AddAssign;
use synom::Synom;
use util::{is_attr_name, is_bool};
use syn::synom::Synom;
use util::is_bool;
impl From<DeriveInput> for Input {
fn from(
@ -12,15 +12,11 @@ impl From<DeriveInput> for Input {
vis,
attrs,
generics,
body,
data,
}: DeriveInput,
) -> Self {
let variants = match body {
Body::Enum(body) => body.variants
.into_iter()
.map(Element::into_item)
.map(From::from)
.collect(),
let variants = match data {
Data::Enum(data) => data.variants.into_iter().map(From::from).collect(),
_ => panic!("#[derive(Kind)] only works for enums"),
};
@ -38,13 +34,13 @@ impl Synom for EnumAttrs {
named!(parse -> Self, do_parse!(
_function: syn!(Ident) >>
fns: parens!(
call!(Delimited::parse_terminated)
call!(Punctuated::parse_terminated)
) >>
({
let fns: Delimited<_, tokens::Comma> = fns.0;
let fns: Punctuated<_, token::Comma> = fns.1;
// TODO: Verify `functions`.
EnumAttrs {
fns: fns.into_vec(),
fns: fns.into_iter().collect(),
extras: Default::default(),
}
})
@ -66,12 +62,13 @@ impl AddAssign<Result<Self, Attribute>> for EnumAttrs {
impl FnDef {
fn def_value_for_type(ty: &Type) -> Option<Expr> {
if is_bool(ty) {
return Some(
ExprKind::Lit(Lit {
value: LitKind::Bool(false),
span: SynSpan(Span::call_site()),
}).into(),
);
return Some(Expr::Lit(ExprLit {
attrs: Default::default(),
lit: Lit::Bool(LitBool {
value: false,
span: Span::call_site(),
}),
}));
}
None
@ -81,9 +78,13 @@ impl FnDef {
impl Synom for FnDef {
named!(parse -> Self, do_parse!(
name: syn!(Ident) >>
syn!(tokens::Eq) >>
return_type: syn!(Lit) >>
syn!(token::Eq) >>
return_type: syn!(LitStr) >>
({
if name.as_ref() == "delegate" {
panic!("function name cannot be `delegate`")
}
let return_type = parse_str_as_tokens(return_type);
FnDef {
default_value: FnDef::def_value_for_type(&return_type),
@ -98,14 +99,14 @@ impl From<Variant> for EnumVar {
fn from(
Variant {
attrs,
data,
fields,
ident: name,
..
}: Variant,
) -> Self {
EnumVar {
name,
data,
data: fields,
attrs: parse_attrs(attrs),
}
}
@ -113,13 +114,16 @@ impl From<Variant> for EnumVar {
impl Synom for VariantAttrs {
named!(parse -> Self, do_parse!(
fn_values: call!(Delimited::parse_terminated)
fn_values: call!(Punctuated::parse_terminated)
>>
({
let fn_values: Delimited<_, tokens::Comma> = fn_values;
let fn_values: Punctuated<_, token::Comma> = fn_values;
let has_delegate = fn_values.iter()
.any(|f: &VariantAttr| f.fn_name == "delegate");
VariantAttrs {
fn_values: fn_values.into_vec(),
fn_values: fn_values.into_iter().collect(),
extras: Default::default(),
has_delegate,
}
})
));
@ -131,6 +135,7 @@ impl AddAssign<Result<Self, Attribute>> for VariantAttrs {
Ok(attr) => {
self.fn_values.extend(attr.fn_values);
self.extras.extend(attr.extras);
self.has_delegate = self.has_delegate || attr.has_delegate;
}
Err(attr) => self.extras.push(attr),
}
@ -142,8 +147,8 @@ impl Synom for VariantAttr {
fn_name: syn!(Ident) >>
value: option!(
do_parse!(
syn!(tokens::Eq) >>
p: syn!(Lit) >>
syn!(token::Eq) >>
p: syn!(LitStr) >>
({
parse_str_as_tokens(p)
})
@ -199,11 +204,7 @@ where
}
if is_attr_name(&attr, "kind") {
let tts = attr.tts.into_iter().map(|t| {
// syn::parse doesn't like Vec<syn::TokenTree>.
t.0
});
let tts = unwrap_paren(tts);
let tts = unwrap_paren(attr.tts);
let parsed: T = parse(tts.into())
.unwrap_or_else(|err| panic!("failed to parse attribute: {}", err));
@ -216,14 +217,14 @@ where
res
}
/// Parse content of string literal as if it's tts.
fn parse_str_as_tokens<T>(lit: Lit) -> T
/// Parse content of string literal.
fn parse_str_as_tokens<T>(lit: LitStr) -> T
where
T: Synom,
{
let span = lit.span.0;
let span = lit.span;
// WTF? Literal does not provide a way to get string...
let tt = lit.value.to_string();
let tt = lit.value();
// TODO:Remove '"' only for first and last.
let tts = tt.replace("\"", "")

View File

@ -11,8 +11,7 @@ pub fn is_bool(ty: &Type) -> bool {
},
}) => {
// check for bool
if segments.len() == 1 && segments.first().unwrap().item().ident.sym.as_str() == "bool"
{
if segments.len() == 1 && segments.first().unwrap().value().ident.as_ref() == "bool" {
return true;
}
}
@ -21,21 +20,3 @@ pub fn is_bool(ty: &Type) -> bool {
false
}
pub fn is_attr_name(attr: &Attribute, name: &str) -> bool {
match *attr {
Attribute {
path:
Path {
leading_colon: None,
ref segments,
},
is_sugared_doc: false,
..
} if segments.len() == 1 =>
{
segments.first().unwrap().into_item().ident == name
}
_ => false,
}
}

View File

@ -7,8 +7,10 @@ pub enum Tokens {
#[kind(is_a)]
#[kind(prec = "7")]
A,
#[kind(prec = "6")] StructLike {},
#[kind(prec = "5")] TupleLike(u8),
#[kind(prec = "6")]
StructLike {},
#[kind(prec = "5")]
TupleLike(u8),
#[kind(prec = "6")]
#[cfg(feature = "not-used")]
@ -21,3 +23,20 @@ fn simple_bool() {
assert!(!Tokens::StructLike {}.is_a());
assert!(!Tokens::TupleLike(5).is_a());
}
#[derive(Debug, Kind)]
#[kind(functions(wanted = "bool"))]
pub enum Delegate {
#[kind(wanted)]
Wanted,
#[kind(delegate)]
May(Del),
}
#[derive(Debug, Kind)]
#[kind(functions(wanted = "bool"))]
pub enum Del {
#[kind(wanted)]
Yes,
No,
}

View File

@ -1,20 +0,0 @@
[package]
name = "eq_ignore_span"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[lib]
proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = { git = "https://github.com/kdy1/rust-pmutil" }
proc-macro2 = { version = "0.1", features = ["unstable"] }
[dependencies.syn]
git = "https://github.com/dtolnay/syn"
features = ["full", "parsing", "printing", "extra-traits"]
[dependencies.synom]
git = "https://github.com/dtolnay/syn"

View File

@ -1,299 +0,0 @@
#![feature(box_syntax)]
#[macro_use]
extern crate pmutil;
extern crate proc_macro2;
extern crate proc_macro;
#[macro_use]
extern crate swc_macros_common as common;
extern crate syn;
use common::prelude::*;
use std::iter;
#[proc_macro_derive(EqIgnoreSpan)]
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = syn::parse(input).expect("failed to parse derive input");
print("derive(EqIgnoreSpan)", expand(input).into_tokens())
}
fn expand(input: DeriveInput) -> Item {
let type_name = &input.ident;
let body = expand_method_body(&input.ident, input.body);
Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
CONST_NAME: type_name.new_ident_with(|n| format!("_IMPL_EQ_IGNORE_SPAN_FOR_{}", n)),
Type: type_name,
body,
},
{
#[allow(non_upper_case_globals)]
const CONST_NAME: () = {
extern crate swc_common as _swc_common;
impl _swc_common::EqIgnoreSpan for Type {
fn eq_ignore_span(&self, __rhs: &Self) -> bool {
body
}
}
()
};
}
))
.parse()
}
/// Bind variants.
/// name is Some(EnumName) for enum, and none for struct.
fn bind_variant(
qual_name: Path,
v: &VariantData,
prefix: &str,
field_binding_mode: BindingMode,
) -> (Pat, Vec<Ident>) {
match *v {
VariantData::Unit => {
// EnumName::VariantName
let pat = Pat::Path(PatPath {
qself: None,
path: qual_name,
});
let pat = Pat::Ref(PatRef {
pat: box pat,
and_token: Span::call_site().as_token(),
mutbl: Mutability::Immutable,
});
// Unit tuple does not have field bindings
(pat, vec![])
}
VariantData::Struct(ref fields, brace_token) => {
let mut bindings = vec![];
let fields = fields
.iter()
.map(Element::into_item)
.map(|f| f.ident.expect("struct field must have ident"))
.map(|ident| {
let binded_ident = ident.new_ident_with(|s| format!("{}{}", prefix, s));
bindings.push(binded_ident.clone());
FieldPat {
ident,
pat: box PatIdent {
mode: field_binding_mode,
ident: binded_ident,
subpat: None,
at_token: None,
}.into(),
is_shorthand: false,
colon_token: Some(ident.span.as_token()),
attrs: Default::default(),
}
})
.collect();
// EnumName::VariantName { fields }
let pat = Pat::Struct(PatStruct {
path: qual_name,
fields,
brace_token,
dot2_token: None,
});
let pat = Pat::Ref(PatRef {
pat: box pat,
and_token: Span::call_site().as_token(),
mutbl: Mutability::Immutable,
});
(pat, bindings)
}
VariantData::Tuple(ref fields, paren_token) => {
// TODO
let mut bindings = vec![];
let pats = fields
.iter()
.map(Element::into_item)
.enumerate()
.map(|(i, _)| {
let binded_ident = Span::call_site().new_ident(format!("{}{}", prefix, i));
bindings.push(binded_ident.clone());
Pat::Ident(PatIdent {
mode: field_binding_mode,
ident: binded_ident,
subpat: None,
at_token: None,
})
})
.collect();
// EnumName::VariantName { fields }
let pat = Pat::TupleStruct(PatTupleStruct {
path: qual_name,
pat: PatTuple {
pats,
paren_token,
dots_pos: None,
dot2_token: None,
comma_token: None,
},
});
let pat = Pat::Ref(PatRef {
pat: box pat,
and_token: Span::call_site().as_token(),
mutbl: Mutability::Immutable,
});
(pat, bindings)
}
}
}
/// Creates method "eq_ignore_span"
fn expand_method_body(name: &Ident, body: Body) -> Expr {
/// qual_name: EnumName::VariantName for enum,
/// StructName for struct
fn arm_for_variant(qual_name: Path, data: &VariantData) -> Arm {
let span = Span::call_site();
let binding_mode = BindingMode::ByRef(span.as_token(), Mutability::Immutable);
let (lhs_pat, lhs_bindings) = bind_variant(qual_name.clone(), data, "lhs_", binding_mode);
let (rhs_pat, rhs_bindings) = bind_variant(qual_name, data, "rhs_", binding_mode);
let guard = (lhs_bindings.into_iter().zip(rhs_bindings))
.map(|(lhs, rhs)| -> Box<Expr> {
box Quote::from_tokens(&lhs)
.quote_with(smart_quote!(Vars { lhs, rhs }, {
_swc_ast_common::EqIgnoreSpan::eq_ignore_span(lhs, rhs)
}))
.parse()
})
.fold(None, |orig, additional_guard| match orig {
Some(orig) => Some(box Quote::new_call_site()
.quote_with(smart_quote!(
Vars {
orig,
additional_guard,
},
{ orig && additional_guard }
))
.parse()),
None => Some(additional_guard),
});
// (lhs_pat, rhs_pat) if guard => true
Arm {
attrs: Default::default(),
pats: vec![
Pat::Tuple(PatTuple {
pats: vec![lhs_pat, rhs_pat].into(),
comma_token: None,
dots_pos: None,
dot2_token: None,
paren_token: span.as_token(),
}),
].into(),
if_token: if guard.is_some() {
Some(span.as_token())
} else {
None
},
guard,
rocket_token: span.as_token(),
body: box ExprKind::Lit(Lit {
span: span.as_syn_span(),
value: LitKind::Bool(true),
}).into(),
comma: Some(span.as_token()),
}
}
/// match *self + delegate to variants
fn body_for_enum(name: &Ident, BodyEnum { variants, .. }: BodyEnum) -> Expr {
let span = Span::call_site();
let arms = variants
.into_iter()
.map(syn::delimited::Element::into_item)
.map(|v| {
arm_for_variant(
// EnumName::VariantName
Path {
leading_colon: None,
segments: vec![name.clone(), v.ident]
.into_iter()
.map(PathSegment::from)
.collect(),
},
&v.data,
)
})
.chain(iter::once({
// _ => false,
Arm {
attrs: Default::default(),
pats: vec![
Pat::Wild(PatWild {
underscore_token: span.as_token(),
}),
].into(),
if_token: None,
guard: None,
rocket_token: span.as_token(),
body: box ExprKind::Lit(Lit {
span: span.as_syn_span(),
value: LitKind::Bool(false),
}).into(),
comma: Some(span.as_token()),
}
}))
.collect();
ExprKind::Match(ExprMatch {
match_token: span.as_token(),
expr: box Quote::new(span)
.quote_with(smart_quote!(Vars {}, { (&*self, &*__rhs) }))
.parse(),
brace_token: span.as_token(),
arms,
}).into()
}
fn body_for_struct(name: &Ident, BodyStruct { data, .. }: BodyStruct) -> Expr {
let span = Span::call_site();
let arms = iter::once(arm_for_variant(name.clone().into(), &data))
.chain(iter::once({
// _ => false,
Arm {
attrs: Default::default(),
pats: vec![
Pat::Wild(PatWild {
underscore_token: span.as_token(),
}),
].into(),
if_token: None,
guard: None,
rocket_token: span.as_token(),
body: box ExprKind::Lit(Lit {
span: span.as_syn_span(),
value: LitKind::Bool(false),
}).into(),
comma: Some(span.as_token()),
}
}))
.collect();
ExprKind::Match(ExprMatch {
match_token: span.as_token(),
expr: box Quote::new(span)
.quote_with(smart_quote!(Vars {}, { (&*self, &*__rhs) }))
.parse(),
brace_token: span.as_token(),
arms,
}).into()
}
match body {
Body::Enum(e) => body_for_enum(name, e),
Body::Struct(s) => body_for_struct(name, s),
}
}

View File

@ -1,13 +1,12 @@
//! Macros used by swc project.
#![allow(unused_import)]
#![feature(macro_reexport)]
#![feature(proc_macros)]
#![allow(unused_imports)]
#[macro_use]
extern crate ast_node;
pub extern crate ast_node;
#[macro_use]
extern crate enum_kind;
#[macro_use]
extern crate eq_ignore_span;
pub extern crate enum_kind;
pub use ast_node::*;
pub use enum_kind::*;
pub use eq_ignore_span::*;

4
scripts/doc.sh Executable file
View File

@ -0,0 +1,4 @@
#!/bin/sh
BASEDIR=$(dirname "$0")
RUSTDOC="$BASEDIR/rustdoc.sh" cargo doc $@

40
scripts/rustdoc.sh Executable file
View File

@ -0,0 +1,40 @@
#!/bin/bash
set -eu
crate_name() {
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
--crate-name)
CRATE_NAME="$2"
shift # past argument
shift # past value
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
echo "$CRATE_NAME"
}
cr=$(crate_name "$@")
if [[ $cr == swc* ]]; then
# We use this instead of --document-private-items to
# make output simillar to usage from outside.
#
# e.g. this inlines self::stmt::*, and when we're using ecmascript::ast,
# we can't use ecmascript::ast::stmt because it's private.
# rustdoc --passes strip-hidden,unindent-comments,\
# collapse-docs,strip-priv-imports,propagate-doc-cfg $@
rustdoc --document-private-items $@
else
rustdoc $@
fi

View File

@ -1,2 +1,4 @@
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecmascript;
pub extern crate swc_macros;

11
testing/Cargo.toml Normal file
View File

@ -0,0 +1,11 @@
[package]
name = "testing"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_common = { path = "../common" }
slog = "2"
slog-envlogger = "2.1"
slog-term = "2.3"
lazy_static = "1"

65
testing/src/lib.rs Normal file
View File

@ -0,0 +1,65 @@
#![feature(specialization)]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate slog;
extern crate slog_envlogger;
extern crate slog_term;
extern crate swc_common;
use slog::{Drain, Logger};
use std::io::{self, Write};
use swc_common::Span;
use swc_common::fold::{FoldWith, Folder};
pub fn logger() -> Logger {
fn no_timestamp(_: &mut Write) -> io::Result<()> {
Ok(())
}
fn root() -> Logger {
let dec = slog_term::TermDecorator::new()
.force_color()
.stderr()
.build();
let drain = slog_term::FullFormat::new(dec)
.use_custom_timestamp(no_timestamp)
.build();
let drain = slog_envlogger::new(drain);
let drain = std::sync::Mutex::new(drain).fuse();
let logger = Logger::root(drain, o!());
logger
}
lazy_static! {
static ref ROOT: Logger = { root() };
};
// hack for cargo test
println!("");
root()
// ROOT.new(o!())
}
/// Remove all span from `t`.
pub fn drop_span<T>(t: T) -> T
where
T: FoldWith<DropSpan>,
{
Folder::<T>::fold(&mut DropSpan, t)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct DropSpan;
impl Folder<Span> for DropSpan {
fn fold(&mut self, _: Span) -> Span {
Span::default()
}
}
#[macro_export]
macro_rules! assert_eq_ignore_span {
($l:expr, $r:expr) => {{
assert_eq!($crate::drop_span($l), $crate::drop_span($r))
}}
}