Update dependencies

- rustc toolchain to "nightly-2018-05-30"
- rustfmt to 0.8.6
- rustc-ap-* to 150
- syn to 0.14
- proc-macro2 to 0.4
- quote to 0.6
This commit is contained in:
강동윤 2018-06-02 18:01:00 +09:00
parent 79f3660f0f
commit 5d5f861b9d
78 changed files with 1226 additions and 839 deletions

View File

@ -1,7 +1,5 @@
required_version = "0.3.6"
reorder_imports = true
reorder_imports_in_group = true
reorder_imported_names = true
required_version = "0.8.2"
use_field_init_shorthand = true
merge_imports = true
wrap_comments = true
write_mode = "replace"
format_strings = true
format_strings = true

View File

@ -10,9 +10,8 @@
"[rust]": {
"editor.formatOnSave": true
},
"rust-client.channel": "nightly-2018-02-01",
"rust-client.channel": "nightly-2018-05-30",
"rust.unstable_features": true,
"rust.workspace_mode": true,
"rust.rustflags": "--cfg procmacro2_semver_exempt",
// Important
"rust.cfg_test": false,

View File

@ -20,6 +20,7 @@ rayon = "0.9"
slog = "2"
slog-envlogger = "2.1"
slog-term = "2.3"
rustc-ap-rustc_data_structures = "150"
[dependencies.clap]
@ -31,6 +32,5 @@ features = [ "suggestions", "color" ]
[profile.bench]
debug = true
[patch.crates-io]
darling = { git = "https://github.com/kdy1/darling", branch = "proc-macro2-nightly" }

View File

@ -8,6 +8,7 @@ atty = "0.2"
fnv = "1"
string_cache = "0.6"
either = "1.4"
rustc-ap-rustc_errors = "16"
rustc-ap-syntax_pos = "16"
rustc-ap-rustc_errors = "150"
rustc-ap-rustc_data_structures = "150"
rustc-ap-syntax_pos = "150"
swc_macros = { path = "../macros" }

View File

@ -1,13 +1,13 @@
use rustc_data_structures::sync::Lrc;
use rustc_errors::CodeMapper;
use std::{env, fs};
use std::cell::{Ref, RefCell};
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::{env, fs};
use syntax_pos::*;
pub struct CodeMap {
files: RefCell<Vec<Rc<FileMap>>>,
files: RefCell<Vec<Lrc<FileMap>>>,
file_loader: Box<FileLoader>,
path_mapping: FilePathMapping,
}
@ -33,19 +33,19 @@ impl CodeMap {
self.file_loader.file_exists(path)
}
pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
pub fn load_file(&self, path: &Path) -> io::Result<Lrc<FileMap>> {
let src = self.file_loader.read_file(path)?;
let filename = path.to_owned().into();
Ok(self.new_filemap(filename, src))
}
pub fn load_file_and_lines(&self, path: &Path) -> io::Result<Rc<FileMap>> {
pub fn load_file_and_lines(&self, path: &Path) -> io::Result<Lrc<FileMap>> {
let src = self.file_loader.read_file(path)?;
let filename = path.to_owned().into();
Ok(self.new_filemap_and_lines(filename, src))
}
pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
pub fn files(&self) -> Ref<Vec<Lrc<FileMap>>> {
self.files.borrow()
}
@ -60,8 +60,9 @@ impl CodeMap {
}
/// Creates a new filemap without setting its line information. If you don't
/// intend to set the line information yourself, you should use new_filemap_and_lines.
pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
/// intend to set the line information yourself, you should use
/// new_filemap_and_lines.
pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<FileMap> {
let start_pos = self.next_start_pos();
let mut files = self.files.borrow_mut();
@ -79,7 +80,7 @@ impl CodeMap {
}
other => (other, false),
};
let filemap = Rc::new(FileMap::new(
let filemap = Lrc::new(FileMap::new(
filename,
was_remapped,
unmapped_path,
@ -93,7 +94,7 @@ impl CodeMap {
}
/// Creates a new filemap and sets its line information.
pub fn new_filemap_and_lines(&self, filename: FileName, src: String) -> Rc<FileMap> {
pub fn new_filemap_and_lines(&self, filename: FileName, src: String) -> Lrc<FileMap> {
// TODO
let fm = self.new_filemap(filename, src.clone());
let mut byte_pos: u32 = fm.start_pos.0;
@ -172,14 +173,14 @@ impl CodeMap {
}
// If the relevant filemap is empty, we don't return a line number.
pub fn lookup_line(&self, pos: BytePos) -> Result<FileMapAndLine, Rc<FileMap>> {
pub fn lookup_line(&self, pos: BytePos) -> Result<FileMapAndLine, Lrc<FileMap>> {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
let f = (*files)[idx].clone();
match f.lookup_line(pos) {
Some(line) => Ok(FileMapAndLine { fm: f, line: line }),
Some(line) => Ok(FileMapAndLine { fm: f, line }),
None => Err(f),
}
}
@ -194,9 +195,10 @@ impl CodeMap {
}
}
/// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
/// there are gaps between lhs and rhs, the resulting union will cross these gaps.
/// For this to work, the spans have to be:
/// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must
/// precede the rhs. If there are gaps between lhs and rhs, the
/// resulting union will cross these gaps. For this to work, the spans
/// have to be:
///
/// * the ctxt of both spans much match
/// * the lhs span needs to end on the same line the rhs span begins
@ -295,7 +297,8 @@ impl CodeMap {
// numbers in Loc are 1-based, so we subtract 1 to get 0-based
// lines.
for line_index in lo.line - 1..hi.line - 1 {
let line_len = lo.file
let line_len = lo
.file
.get_line(line_index)
.map(|s| s.chars().count())
.unwrap_or(0);
@ -316,7 +319,7 @@ impl CodeMap {
Ok(FileLines {
file: lo.file,
lines: lines,
lines,
})
}
@ -390,7 +393,7 @@ impl CodeMap {
self.span_until_char(sp, '{')
}
pub fn get_filemap(&self, filename: &FileName) -> Option<Rc<FileMap>> {
pub fn get_filemap(&self, filename: &FileName) -> Option<Lrc<FileMap>> {
for fm in self.files.borrow().iter() {
if *filename == fm.name {
return Some(fm.clone());
@ -405,10 +408,7 @@ impl CodeMap {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
let offset = bpos - fm.start_pos;
FileMapAndBytePos {
fm: fm,
pos: offset,
}
FileMapAndBytePos { fm, pos: offset }
}
/// Converts an absolute BytePos to a CharPos relative to the filemap.
@ -465,7 +465,7 @@ impl CodeMap {
return a;
}
fn ensure_filemap_source_present(&self, file_map: Rc<FileMap>) -> bool {
fn ensure_filemap_source_present(&self, file_map: Lrc<FileMap>) -> bool {
file_map.add_external_src(|| match file_map.name {
FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
_ => None,
@ -567,7 +567,7 @@ impl CodeMapper for CodeMap {
}
sp
}
fn ensure_filemap_source_present(&self, file_map: Rc<FileMap>) -> bool {
fn ensure_filemap_source_present(&self, file_map: Lrc<FileMap>) -> bool {
self.ensure_filemap_source_present(file_map)
}
/// No op.

View File

@ -1,8 +1,8 @@
use super::Handler;
use {MultiSpan, Span};
use rustc_errors::{Diagnostic as RustcDiagnostic, Level};
pub use rustc_errors::{DiagnosticId, DiagnosticStyledString};
use std::fmt;
use {MultiSpan, Span};
#[must_use]
pub struct Diagnostic {
@ -136,8 +136,9 @@ impl Diagnostic {
self
}
/// Prints out a message with a suggested edit of the code. If the suggestion is presented
/// inline it will only show the text message and not the text.
/// Prints out a message with a suggested edit of the code. If the
/// suggestion is presented inline it will only show the text message
/// and not the text.
///
/// See `CodeSuggestion` for more information.
#[inline(always)]
@ -159,7 +160,8 @@ impl Diagnostic {
/// * should not be a question
/// * should not contain any parts like "the following", "as shown"
/// * may look like "to do xyz, use" or "to do xyz, use abc"
/// * may contain a name of a function, variable or type, but not whole expressions
/// * may contain a name of a function, variable or type, but not whole
/// expressions
///
/// See `CodeSuggestion` for more information.
#[inline(always)]

View File

@ -1,8 +1,8 @@
use super::Handler;
use {MultiSpan, Span};
use rustc_errors::{Diagnostic as RustcDiagnostic, DiagnosticBuilder as Builder, Level};
use rustc_errors::{DiagnosticId, DiagnosticStyledString};
use std::fmt;
use {MultiSpan, Span};
#[must_use]
pub struct DiagnosticBuilder<'a> {
@ -111,8 +111,9 @@ impl<'a> DiagnosticBuilder<'a> {
self
}
/// Prints out a message with a suggested edit of the code. If the suggestion is presented
/// inline it will only show the text message and not the text.
/// Prints out a message with a suggested edit of the code. If the
/// suggestion is presented inline it will only show the text message
/// and not the text.
///
/// See `CodeSuggestion` for more information.
pub fn span_suggestion_short(mut self, sp: Span, msg: &str, suggestion: String) -> Self {
@ -133,7 +134,8 @@ impl<'a> DiagnosticBuilder<'a> {
/// * should not be a question
/// * should not contain any parts like "the following", "as shown"
/// * may look like "to do xyz, use" or "to do xyz, use abc"
/// * may contain a name of a function, variable or type, but not whole expressions
/// * may contain a name of a function, variable or type, but not whole
/// expressions
///
/// See `CodeSuggestion` for more information.
pub fn span_suggestion(mut self, sp: Span, msg: &str, suggestion: String) -> Self {

View File

@ -1,9 +1,11 @@
use super::{Diagnostic, DiagnosticBuilder};
use rustc_errors::{CodeMapper, ColorConfig, DiagnosticBuilder as RustcDiagnosticBuilder,
Handler as RustcHandler, Level};
pub use rustc_errors::HandlerFlags;
use rustc_data_structures::sync::{Lrc, Send, Sync};
pub use rustc_errors::emitter::{Emitter, EmitterWriter};
use std::rc::Rc;
pub use rustc_errors::HandlerFlags;
use rustc_errors::{
CodeMapper, ColorConfig, DiagnosticBuilder as RustcDiagnosticBuilder, Handler as RustcHandler,
Level,
};
/// A handler deals with errors.
pub struct Handler {
@ -15,20 +17,20 @@ impl Handler {
color_config: ColorConfig,
can_emit_warnings: bool,
treat_err_as_bug: bool,
cm: Option<Rc<CodeMapper>>,
cm: Option<Lrc<CodeMapper + Send + Sync>>,
) -> Self {
RustcHandler::with_tty_emitter(color_config, can_emit_warnings, treat_err_as_bug, cm).into()
}
pub fn with_tty_emitter_and_flags(
color_config: ColorConfig,
cm: Option<Rc<CodeMapper>>,
cm: Option<Lrc<CodeMapper + Send + Sync>>,
flags: HandlerFlags,
) -> Self {
RustcHandler::with_tty_emitter_and_flags(color_config, cm, flags).into()
}
pub fn with_emitter(e: Box<Emitter>, flags: HandlerFlags) -> Self {
pub fn with_emitter(e: Box<Emitter + Send>, flags: HandlerFlags) -> Self {
RustcHandler::with_emitter_and_flags(e, flags).into()
}
@ -60,14 +62,13 @@ impl Handler {
self.inner.abort_if_errors()
}
pub fn track_diagnostics<F, R>(&self, f: F) -> (R, Vec<Diagnostic>)
where
F: FnOnce() -> R,
{
let (result, errors) = self.inner.track_diagnostics(f);
(result, errors.into_iter().map(From::from).collect())
}
// pub fn track_diagnostics<F, R>(&self, f: F) -> (R, Vec<Diagnostic>)
// where
// F: FnOnce() -> R,
// {
// let (result, errors) = self.inner.track_diagnostics(f);
// (result, errors.into_iter().map(From::from).collect())
// }
}
impl From<RustcHandler> for Handler {

View File

@ -9,8 +9,8 @@ pub use self::codemap::{CodeMap, FileLoader, FilePathMapping, RealFileLoader};
pub use self::diagnostic::*;
pub use self::diagnostic_builder::DiagnosticBuilder;
pub use self::handler::*;
pub use rustc_errors::{ColorConfig, Level};
pub use rustc_errors::Level::*;
pub use rustc_errors::{ColorConfig, Level};
mod codemap;
mod diagnostic;

View File

@ -1,8 +1,8 @@
use super::*;
use {BytePos, Span};
use std::io;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use {BytePos, Span};
struct MyFileLoader;
impl FileLoader for MyFileLoader {
@ -32,7 +32,8 @@ function foo() {
#[test]
fn test() {
let cm = CodeMap::with_file_loader(box MyFileLoader, FilePathMapping::empty());
let file_map = cm.load_file_and_lines("tmp.js".as_ref())
let file_map = cm
.load_file_and_lines("tmp.js".as_ref())
.expect("failed to load tmp.js");
println!(
"File (start={},end={})",
@ -44,23 +45,25 @@ fn test() {
let handler = Handler::with_tty_emitter(ColorConfig::Always, false, false, Some(Rc::new(cm)));
DiagnosticBuilder::new_with_code(
&handler,
Error,
Some(DiagnosticId::Error("ABCDE".into())),
"Test span_label",
).span(full)
.emit();
::syntax_pos::GLOBALS.set(&::syntax_pos::Globals::new(), || {
DiagnosticBuilder::new_with_code(
&handler,
Error,
Some(DiagnosticId::Error("ABCDE".into())),
"Test span_label",
).span(full)
.emit();
DiagnosticBuilder::new_with_code(
&handler,
Warning,
Some(DiagnosticId::Lint("WITH_STMT".into())),
"Lint: With statement",
).span(Span::new(
start_pos + BytePos(21),
start_pos + BytePos(25),
Default::default(),
))
.emit();
DiagnosticBuilder::new_with_code(
&handler,
Warning,
Some(DiagnosticId::Lint("WITH_STMT".into())),
"Lint: With statement",
).span(Span::new(
start_pos + BytePos(21),
start_pos + BytePos(25),
Default::default(),
))
.emit();
})
}

View File

@ -1,5 +1,4 @@
#![feature(box_syntax)]
#![feature(i128_type)]
#![feature(range_contains)]
#![feature(try_trait)]
#![feature(proc_macro)]
@ -8,15 +7,16 @@
extern crate atty;
extern crate either;
extern crate fnv;
extern crate rustc_data_structures;
extern crate rustc_errors;
extern crate string_cache;
extern crate swc_macros;
extern crate syntax_pos;
pub use self::ast_node::AstNode;
pub use self::fold::{Fold, FoldWith};
#[deprecated(note = "please use Fold instead")]
pub use self::fold::Fold as Folder;
pub use self::fold::{Fold, FoldWith};
pub use self::pos::*;
mod ast_node;

View File

@ -1,7 +1,9 @@
use fold::FoldWith;
pub use swc_macros::Spanned;
pub use syntax_pos::{hygiene, BytePos, ExpnFormat, ExpnInfo, FileMap, FileName, MultiSpan,
NameAndSpan, Span, SpanData, SyntaxContext, DUMMY_SP, NO_EXPANSION};
pub use syntax_pos::{
hygiene, BytePos, ExpnFormat, ExpnInfo, FileMap, FileName, MultiSpan, NameAndSpan, Span,
SpanData, SyntaxContext, DUMMY_SP, NO_EXPANSION,
};
///
/// # Derive

View File

@ -1,5 +1,6 @@
use super::{AssignOp, BinaryOp, BlockStmt, Class, Function, Ident, Lit, Pat, Prop, UnaryOp,
UpdateOp};
use super::{
AssignOp, BinaryOp, BlockStmt, Class, Function, Ident, Lit, Pat, Prop, UnaryOp, UpdateOp,
};
use swc_common::{Span, Spanned};
use swc_macros::ast_node;
@ -30,9 +31,10 @@ pub enum Expr {
// left: Box<Expr>,
// right: Box<Expr>,
// },
/// A member expression. If computed is true, the node corresponds to a computed
/// (a[b]) member expression and property is an Expression. If computed is false, the node
/// corresponds to a static (a.b) member expression and property is an Identifier.
/// A member expression. If computed is true, the node corresponds to a
/// computed (a[b]) member expression and property is an Expression. If
/// computed is false, the node corresponds to a static (a.b) member
/// expression and property is an Identifier.
Member(MemberExpr),
/// true ? 'a' : 'b'

View File

@ -16,37 +16,44 @@ extern crate swc_macros;
pub use self::class::{Class, ClassMethod, ClassMethodKind};
pub use self::decl::{ClassDecl, Decl, FnDecl, VarDecl, VarDeclKind, VarDeclarator};
pub use self::expr::{ArrayLit, ArrowExpr, AssignExpr, AwaitExpr, BinExpr, BlockStmtOrExpr,
CallExpr, ClassExpr, CondExpr, Expr, ExprOrSpread, ExprOrSuper, FnExpr,
MemberExpr, MetaPropExpr, NewExpr, ObjectLit, ParenExpr, PatOrExpr, SeqExpr,
ThisExpr, TplElement, TplLit, UnaryExpr, UpdateExpr, YieldExpr};
pub use self::expr::{
ArrayLit, ArrowExpr, AssignExpr, AwaitExpr, BinExpr, BlockStmtOrExpr, CallExpr, ClassExpr,
CondExpr, Expr, ExprOrSpread, ExprOrSuper, FnExpr, MemberExpr, MetaPropExpr, NewExpr,
ObjectLit, ParenExpr, PatOrExpr, SeqExpr, ThisExpr, TplElement, TplLit, UnaryExpr, UpdateExpr,
YieldExpr,
};
pub use self::function::Function;
pub use self::lit::{Bool, Lit, Null, Number, Regex, RegexFlags, Str};
pub use self::module::{Module, ModuleItem};
pub use self::module_decl::{ExportAll, ExportDefaultDecl, ExportSpecifier, ImportDecl,
ImportDefault, ImportSpecific, ImportSpecifier, ImportStarAs,
ModuleDecl, NamedExport};
pub use self::module_decl::{
ExportAll, ExportDefaultDecl, ExportSpecifier, ImportDecl, ImportDefault, ImportSpecific,
ImportSpecifier, ImportStarAs, ModuleDecl, NamedExport,
};
pub use self::operators::{AssignOp, BinaryOp, UnaryOp, UpdateOp};
pub use self::pat::{ArrayPat, AssignPat, AssignPatProp, KeyValuePatProp, ObjectPat, ObjectPatProp,
Pat, RestPat};
pub use self::prop::{AssignProp, GetterProp, KeyValueProp, MethodProp, Prop, PropName, SetterProp};
pub use self::stmt::{BlockStmt, BreakStmt, CatchClause, ContinueStmt, DebuggerStmt, DoWhileStmt,
EmptyStmt, ForInStmt, ForOfStmt, ForStmt, IfStmt, LabeledStmt, ReturnStmt,
Stmt, SwitchCase, SwitchStmt, ThrowStmt, TryStmt, VarDeclOrExpr,
VarDeclOrPat, WhileStmt, WithStmt};
pub use self::pat::{
ArrayPat, AssignPat, AssignPatProp, KeyValuePatProp, ObjectPat, ObjectPatProp, Pat, RestPat,
};
pub use self::prop::{
AssignProp, GetterProp, KeyValueProp, MethodProp, Prop, PropName, SetterProp,
};
pub use self::stmt::{
BlockStmt, BreakStmt, CatchClause, ContinueStmt, DebuggerStmt, DoWhileStmt, EmptyStmt,
ForInStmt, ForOfStmt, ForStmt, IfStmt, LabeledStmt, ReturnStmt, Stmt, SwitchCase, SwitchStmt,
ThrowStmt, TryStmt, VarDeclOrExpr, VarDeclOrPat, WhileStmt, WithStmt,
};
use std::fmt::{self, Debug, Display, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
use swc_macros::Fold;
mod macros;
mod class;
mod decl;
mod expr;
mod function;
mod lit;
mod module_decl;
mod macros;
mod module;
mod module_decl;
mod operators;
mod pat;
mod prop;

View File

@ -5,53 +5,145 @@
/// Binary +,- is `op!(bin, "+")`, `op!(bin, "-")`.
#[macro_export]
macro_rules! op {
(unary, "-") => { $crate::UnaryOp::Minus };
(unary, "+") => { $crate::UnaryOp::Plus };
("!") => { $crate::UnaryOp::Bang };
("~") => { $crate::UnaryOp::Tilde };
("typeof") => { $crate::UnaryOp::TypeOf };
("void") => { $crate::UnaryOp::Void };
("delete") => { $crate::UnaryOp::Delete };
(unary,"-") => {
$crate::UnaryOp::Minus
};
(unary,"+") => {
$crate::UnaryOp::Plus
};
("!") => {
$crate::UnaryOp::Bang
};
("~") => {
$crate::UnaryOp::Tilde
};
("typeof") => {
$crate::UnaryOp::TypeOf
};
("void") => {
$crate::UnaryOp::Void
};
("delete") => {
$crate::UnaryOp::Delete
};
("++") => { $crate::UpdateOp::PlusPlus };
("--") => { $crate::UpdateOp::MinusMinus };
("++") => {
$crate::UpdateOp::PlusPlus
};
("--") => {
$crate::UpdateOp::MinusMinus
};
("==") => { $crate::BinaryOp::EqEq };
("!=") => { $crate::BinaryOp::NotEq };
("===") => { $crate::BinaryOp::EqEqEq };
("!==") => { $crate::BinaryOp::NotEqEq };
("<") => { $crate::BinaryOp::Lt };
("<=") => { $crate::BinaryOp::LtEq };
(">") => { $crate::BinaryOp::Gt };
(">=") => { $crate::BinaryOp::GtEq };
("<<") => { $crate::BinaryOp::LShift };
(">>") => { $crate::BinaryOp::RShift };
(">>>") => { $crate::BinaryOp::ZeroFillRShift };
(bin, "+") => { $crate::BinaryOp::Add };
(bin, "-") => { $crate::BinaryOp::Sub };
("*") => { $crate::BinaryOp::Mul };
("/") => { $crate::BinaryOp::Div };
("%") => { $crate::BinaryOp::Mod };
("|") => { $crate::BinaryOp::BitOr };
("^") => { $crate::BinaryOp::BitXor };
("&") => { $crate::BinaryOp::BitAnd };
("||") => { $crate::BinaryOp::LogicalOr };
("&&") => { $crate::BinaryOp::LogicalAnd };
("in") => { $crate::BinaryOp::In };
("instanceof") => { $crate::BinaryOp::InstanceOf };
("**") => { $crate::BinaryOp::Exp };
("==") => {
$crate::BinaryOp::EqEq
};
("!=") => {
$crate::BinaryOp::NotEq
};
("===") => {
$crate::BinaryOp::EqEqEq
};
("!==") => {
$crate::BinaryOp::NotEqEq
};
("<") => {
$crate::BinaryOp::Lt
};
("<=") => {
$crate::BinaryOp::LtEq
};
(">") => {
$crate::BinaryOp::Gt
};
(">=") => {
$crate::BinaryOp::GtEq
};
("<<") => {
$crate::BinaryOp::LShift
};
(">>") => {
$crate::BinaryOp::RShift
};
(">>>") => {
$crate::BinaryOp::ZeroFillRShift
};
(bin,"+") => {
$crate::BinaryOp::Add
};
(bin,"-") => {
$crate::BinaryOp::Sub
};
("*") => {
$crate::BinaryOp::Mul
};
("/") => {
$crate::BinaryOp::Div
};
("%") => {
$crate::BinaryOp::Mod
};
("|") => {
$crate::BinaryOp::BitOr
};
("^") => {
$crate::BinaryOp::BitXor
};
("&") => {
$crate::BinaryOp::BitAnd
};
("||") => {
$crate::BinaryOp::LogicalOr
};
("&&") => {
$crate::BinaryOp::LogicalAnd
};
("in") => {
$crate::BinaryOp::In
};
("instanceof") => {
$crate::BinaryOp::InstanceOf
};
("**") => {
$crate::BinaryOp::Exp
};
("=") => { $crate::AssignOp::Assign };
("+=") => { $crate::AssignOp::AddAssign };
("-=") => { $crate::AssignOp::SubAssign };
("*=") => { $crate::AssignOp::MulAssign };
("/=") => { $crate::AssignOp::DivAssign };
("%=") => { $crate::AssignOp::ModAssign };
("<<=") => { $crate::AssignOp::LShiftAssign };
(">>=") => { $crate::AssignOp::RShiftAssign };
(">>>=") => { $crate::AssignOp::ZeroFillRShiftAssign };
("|=") => { $crate::AssignOp::BitOrAssign };
("^=") => { $crate::AssignOp::BitXorAssign };
("&=") => { $crate::AssignOp::BitAndAssign };
("**=") => { $crate::AssignOp::ExpAssign };
("=") => {
$crate::AssignOp::Assign
};
("+=") => {
$crate::AssignOp::AddAssign
};
("-=") => {
$crate::AssignOp::SubAssign
};
("*=") => {
$crate::AssignOp::MulAssign
};
("/=") => {
$crate::AssignOp::DivAssign
};
("%=") => {
$crate::AssignOp::ModAssign
};
("<<=") => {
$crate::AssignOp::LShiftAssign
};
(">>=") => {
$crate::AssignOp::RShiftAssign
};
(">>>=") => {
$crate::AssignOp::ZeroFillRShiftAssign
};
("|=") => {
$crate::AssignOp::BitOrAssign
};
("^=") => {
$crate::AssignOp::BitXorAssign
};
("&=") => {
$crate::AssignOp::BitAndAssign
};
("**=") => {
$crate::AssignOp::ExpAssign
};
}

View File

@ -68,7 +68,8 @@ pub struct ImportStarAs {
pub local: Ident,
}
/// e.g. local = foo, imported = None `import { foo } from 'mod.js'`
/// e.g. local = bar, imported = Some(foo) for `import { foo as bar } from 'mod.js'`
/// e.g. local = bar, imported = Some(foo) for `import { foo as bar } from
/// 'mod.js'`
#[ast_node]
pub struct ImportSpecific {
pub span: Span,

View File

@ -2,8 +2,8 @@ use self::SyntaxError::*;
use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
use swc_common::errors::{DiagnosticBuilder, Handler};
use swc_common::Span;
use token::Token;
#[derive(Copy, Clone)]

View File

@ -8,13 +8,13 @@ pub use self::input::Input;
use self::input::LexerInput;
use self::state::State;
use self::util::*;
use {Context, Session};
use ast::Str;
use error::SyntaxError;
use std::char;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use token::*;
use {Context, Session};
pub mod input;
mod number;
@ -354,9 +354,7 @@ impl<'a, I: Input> Lexer<'a, I> {
};
self.bump();
}
_ => {
return Ok(Some(value as char))
},
_ => return Ok(Some(value as char)),
}
}};
}
@ -392,15 +390,17 @@ impl<'a, I: Input> Lexer<'a, I> {
fn read_token_lt_gt(&mut self) -> LexResult<Option<Token>> {
assert!(self.cur() == Some('<') || self.cur() == Some('>'));
let start = self.cur_pos();
let c = self.cur().unwrap();
self.bump();
// XML style comment. `<!--`
if !self.ctx.module && c == '<' && self.is('!') && self.peek() == Some('-')
&& self.peek_ahead() == Some('-')
{
if c == '<' && self.is('!') && self.peek() == Some('-') && self.peek_ahead() == Some('-') {
self.skip_line_comment(3);
self.skip_space()?;
if self.ctx.module {
self.error(start, SyntaxError::LegacyCommentInModule)?;
}
return self.read_token();
}

View File

@ -50,15 +50,13 @@ impl<'a, I: Input> Lexer<'a, I> {
// strict mode hates non-zero decimals starting with zero.
// e.g. 08.1 is strict mode violation but 0.1 is valid float.
if self.ctx.strict {
self.error(start, SyntaxError::LegacyDecimal)?
}
let s = format!("{}", val); // TODO: Remove allocation.
// if it contains '8' or '9', it's decimal.
if s.contains('8') || s.contains('9') {
if self.ctx.strict {
self.error(start, SyntaxError::LegacyDecimal)?
}
} else {
// It's Legacy octal, and we should reinterpret value.
let val = u64::from_str_radix(&format!("{}", val), 8)
@ -279,8 +277,8 @@ impl<'a, I: Input> Lexer<'a, I> {
#[cfg(test)]
mod tests {
use super::*;
use super::input::FileMapInput;
use super::*;
use std::f64::INFINITY;
use std::panic;

View File

@ -92,7 +92,8 @@ impl Default for State {
impl State {
pub fn can_skip_space(&self) -> bool {
!self.context
!self
.context
.current()
.map(|t| t.preserve_space())
.unwrap_or(false)
@ -184,7 +185,8 @@ impl State {
// for (a of b) {}
tok!("of") if Some(Type::ParenStmt { is_for_loop: true }) == context.current() => {
// e.g. for (a of _) => true
!prev.expect("context.current() if ParenStmt, so prev token cannot be None")
!prev
.expect("context.current() if ParenStmt, so prev token cannot be None")
.before_expr()
}
@ -261,8 +263,8 @@ impl State {
#[derive(Debug, Default)]
struct Context(Vec<Type>);
impl Context {
/// Returns true if following `LBrace` token is `block statement` according to
/// `ctx`, `prev`, `is_expr_allowed`.
/// Returns true if following `LBrace` token is `block statement` according
/// to `ctx`, `prev`, `is_expr_allowed`.
fn is_brace_block(
&self,
prev: Option<Token>,
@ -332,17 +334,21 @@ impl Context {
#[kind(fucntion(is_expr = "bool", preserve_space = "bool"))]
enum Type {
BraceStmt,
#[kind(is_expr)] BraceExpr,
#[kind(is_expr)] TplQuasi,
#[kind(is_expr)]
BraceExpr,
#[kind(is_expr)]
TplQuasi,
ParenStmt {
/// Is this `for` loop?
is_for_loop: bool,
},
#[kind(is_expr)] ParenExpr,
#[kind(is_expr)]
ParenExpr,
#[kind(is_expr, preserve_space)]
Tpl {
/// Start of a template literal.
start: BytePos,
},
#[kind(is_expr)] FnExpr,
#[kind(is_expr)]
FnExpr,
}

View File

@ -1,5 +1,6 @@
use super::*;
use super::input::FileMapInput;
use super::*;
use error::{Error, SyntaxError};
use std::ops::Range;
use std::str;
@ -22,9 +23,15 @@ where
}
fn lex(s: &'static str) -> Vec<TokenAndSpan> {
println!("Source:\n{}", s);
with_lexer(s, |l| l.collect())
}
fn lex_module(s: &'static str) -> Vec<TokenAndSpan> {
with_lexer(s, |l| {
l.ctx.strict = true;
l.ctx.module = true;
l.collect()
})
}
fn lex_tokens(s: &'static str) -> Vec<Token> {
with_lexer(s, |l| l.map(|ts| ts.token).collect())
}
@ -114,6 +121,60 @@ impl WithSpan for AssignOpToken {
AssignOp(self)
}
}
#[test]
fn module_legacy_octal() {
assert_eq!(
lex_module("01"),
vec![
Token::Error(Error {
span: sp(0..2),
error: SyntaxError::LegacyOctal,
}).span(0..2)
.lb(),
]
);
}
#[test]
fn module_legacy_decimal() {
assert_eq!(
lex_module("08"),
vec![
Token::Error(Error {
span: sp(0..2),
error: SyntaxError::LegacyDecimal,
}).span(0..2)
.lb(),
]
);
}
#[test]
fn module_legacy_comment_1() {
assert_eq!(
lex_module("<!-- foo oo"),
vec![
Token::Error(Error {
span: sp(0..11),
error: SyntaxError::LegacyCommentInModule,
}).span(0..11)
.lb(),
]
)
}
#[test]
fn module_legacy_comment_2() {
assert_eq!(
lex_module("-->"),
vec![
Token::Error(Error {
span: sp(0..3),
error: SyntaxError::LegacyCommentInModule,
}).span(0..3)
.lb(),
]
)
}
#[test]
fn test262_lexer_error_0001() {
@ -606,21 +667,17 @@ fn migrated_0006() {
#[test]
fn str_lit() {
assert_eq!(
vec![
Token::Str {
value: "abcde".into(),
has_escape: false,
},
],
vec![Token::Str {
value: "abcde".into(),
has_escape: false,
}],
lex_tokens("'abcde'")
);
assert_eq_ignore_span!(
vec![
Token::Str {
value: "abc".into(),
has_escape: true,
},
],
vec![Token::Str {
value: "abc".into(),
has_escape: true,
}],
lex_tokens("'\\\nabc'")
);
}

View File

@ -7,8 +7,8 @@
//!
//! Note: Currently this use xid instead of id because unicode_xid crate
//! exists.
use super::{LexResult, Lexer};
use super::input::Input;
use super::{LexResult, Lexer};
use error::{ErrorToDiag, SyntaxError};
use swc_common::{BytePos, Span};
use unicode_xid::UnicodeXID;

View File

@ -7,7 +7,6 @@
#![feature(proc_macro)]
#![feature(try_from)]
#![feature(try_trait)]
#![cfg_attr(test, feature(conservative_impl_trait))]
#![deny(unreachable_patterns)]
#![deny(unsafe_code)]
@ -34,8 +33,8 @@ use swc_common::errors::Handler;
mod macros;
mod error;
mod lexer;
mod token;
mod parser;
mod token;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Config {
@ -78,8 +77,8 @@ where
F: FnOnce(Session, FileMapInput) -> Ret,
{
use std::rc::Rc;
use swc_common::FileName;
use swc_common::errors::{CodeMap, FilePathMapping};
use swc_common::FileName;
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let fm = cm.new_filemap(FileName::Real("testing".into()), src.into());

View File

@ -1,87 +1,230 @@
macro_rules! tok {
('`') => { Token::BackQuote };
('`') => {
Token::BackQuote
};
// (';') => { Token::Semi };
(',') => { Token::Comma };
('?') => { Token::QuestionMark };
(':') => { Token::Colon };
("::") => { Token::ColonColon };
('.') => { Token::Dot };
("=>") => { Token::Arrow };
("...") => { Token::DotDotDot };
("${") => { Token::DollarLBrace };
(',') => {
Token::Comma
};
('?') => {
Token::QuestionMark
};
(':') => {
Token::Colon
};
("::") => {
Token::ColonColon
};
('.') => {
Token::Dot
};
("=>") => {
Token::Arrow
};
("...") => {
Token::DotDotDot
};
("${") => {
Token::DollarLBrace
};
('+') => { Token::BinOp(Add) };
('-') => { Token::BinOp(Sub) };
('*') => { Token::BinOp(Mul) };
('/') => { Token::BinOp(Div) };
("/=") => { Token::AssignOp(DivAssign) };
('%') => { Token::BinOp(Mod) };
('!') => { Token::Bang };
('~') => { Token::Tilde };
('<') => { Token::BinOp(Lt) };
('>') => { Token::BinOp(Gt) };
('+') => {
Token::BinOp(Add)
};
('-') => {
Token::BinOp(Sub)
};
('*') => {
Token::BinOp(Mul)
};
('/') => {
Token::BinOp(Div)
};
("/=") => {
Token::AssignOp(DivAssign)
};
('%') => {
Token::BinOp(Mod)
};
('!') => {
Token::Bang
};
('~') => {
Token::Tilde
};
('<') => {
Token::BinOp(Lt)
};
('>') => {
Token::BinOp(Gt)
};
("++") => { Token::PlusPlus };
("--") => { Token::MinusMinus };
("++") => {
Token::PlusPlus
};
("--") => {
Token::MinusMinus
};
('=') => { Token::AssignOp(Assign) };
('=') => {
Token::AssignOp(Assign)
};
('(') => {
Token::LParen
};
(')') => {
Token::RParen
};
('{') => {
Token::LBrace
};
('}') => {
Token::RBrace
};
('[') => {
Token::LBracket
};
(']') => {
Token::RBracket
};
('(') => { Token::LParen };
(')') => { Token::RParen };
('{') => { Token::LBrace };
('}') => { Token::RBrace };
('[') => { Token::LBracket };
(']') => { Token::RBracket };
("async") => { Token::Word(Word::Ident(js_word!("async"))) };
("as") => { Token::Word(Word::Ident(js_word!("as"))) };
("await") => { Token::Word(Keyword(Await)) };
("break") => { Token::Word(Keyword(Break)) };
("case") => { Token::Word(Keyword(Case)) };
("catch") => { Token::Word(Keyword(Catch)) };
("class") => { Token::Word(Keyword(Class)) };
("const") => { Token::Word(Keyword(Const)) };
("continue") => { Token::Word(Keyword(Continue)) };
("debugger") => { Token::Word(Keyword(Debugger)) };
("default") => { Token::Word(Keyword(Default_)) };
("delete") => { Token::Word(Keyword(Delete)) };
("do") => { Token::Word(Keyword(Do)) };
("else") => { Token::Word(Keyword(Else)) };
("export") => { Token::Word(Keyword(Export)) };
("extends") => { Token::Word(Keyword(Extends)) };
("false") => { Token::Word(False) };
("finally") => { Token::Word(Keyword(Finally)) };
("for") => { Token::Word(Keyword(For)) };
("from") => { Token::Word(Word::Ident(js_word!("from"))) };
("function") => { Token::Word(Keyword(Function)) };
("if") => { Token::Word(Keyword(If)) };
("in") => { Token::Word(Keyword(In)) };
("import") => { Token::Word(Keyword(Import)) };
("let") => { Token::Word(Keyword(Let)) };
("new") => { Token::Word(Keyword(New)) };
("null") => { Token::Word(Null) };
("of") => { Token::Word(Ident(js_word!("of"))) };
("return") => { Token::Word(Keyword(Return)) };
("super") => { Token::Word(Keyword(Super)) };
("static") => { Token::Word(Word::Ident(js_word!("static"))) };
("switch") => { Token::Word(Keyword(Switch)) };
("target") => { Token::Word(Word::Ident(js_word!("target"))) };
("this") => { Token::Word(Keyword(This)) };
("throw") => { Token::Word(Keyword(Throw)) };
("true") => { Token::Word(True) };
("try") => { Token::Word(Keyword(Try)) };
("typeof") => { Token::Word(Keyword(TypeOf)) };
("var") => { Token::Word(Keyword(Var)) };
("void") => { Token::Word(Keyword(Void)) };
("while") => { Token::Word(Keyword(While)) };
("with") => { Token::Word(Keyword(With)) };
("yield") => { Token::Word(Keyword(Yield)) };
("async") => {
Token::Word(Word::Ident(js_word!("async")))
};
("as") => {
Token::Word(Word::Ident(js_word!("as")))
};
("await") => {
Token::Word(Keyword(Await))
};
("break") => {
Token::Word(Keyword(Break))
};
("case") => {
Token::Word(Keyword(Case))
};
("catch") => {
Token::Word(Keyword(Catch))
};
("class") => {
Token::Word(Keyword(Class))
};
("const") => {
Token::Word(Keyword(Const))
};
("continue") => {
Token::Word(Keyword(Continue))
};
("debugger") => {
Token::Word(Keyword(Debugger))
};
("default") => {
Token::Word(Keyword(Default_))
};
("delete") => {
Token::Word(Keyword(Delete))
};
("do") => {
Token::Word(Keyword(Do))
};
("else") => {
Token::Word(Keyword(Else))
};
("export") => {
Token::Word(Keyword(Export))
};
("extends") => {
Token::Word(Keyword(Extends))
};
("false") => {
Token::Word(False)
};
("finally") => {
Token::Word(Keyword(Finally))
};
("for") => {
Token::Word(Keyword(For))
};
("from") => {
Token::Word(Word::Ident(js_word!("from")))
};
("function") => {
Token::Word(Keyword(Function))
};
("if") => {
Token::Word(Keyword(If))
};
("in") => {
Token::Word(Keyword(In))
};
("import") => {
Token::Word(Keyword(Import))
};
("let") => {
Token::Word(Keyword(Let))
};
("new") => {
Token::Word(Keyword(New))
};
("null") => {
Token::Word(Null)
};
("of") => {
Token::Word(Ident(js_word!("of")))
};
("return") => {
Token::Word(Keyword(Return))
};
("super") => {
Token::Word(Keyword(Super))
};
("static") => {
Token::Word(Word::Ident(js_word!("static")))
};
("switch") => {
Token::Word(Keyword(Switch))
};
("target") => {
Token::Word(Word::Ident(js_word!("target")))
};
("this") => {
Token::Word(Keyword(This))
};
("throw") => {
Token::Word(Keyword(Throw))
};
("true") => {
Token::Word(True)
};
("try") => {
Token::Word(Keyword(Try))
};
("typeof") => {
Token::Word(Keyword(TypeOf))
};
("var") => {
Token::Word(Keyword(Var))
};
("void") => {
Token::Word(Keyword(Void))
};
("while") => {
Token::Word(Keyword(While))
};
("with") => {
Token::Word(Keyword(With))
};
("yield") => {
Token::Word(Keyword(Yield))
};
}
macro_rules! token_including_semi {
(';') => { Token::Semi };
($t:tt) => { tok!($t) };
(';') => {
Token::Semi
};
($t:tt) => {
tok!($t)
};
}

View File

@ -1,7 +1,7 @@
//! Parser for function expression and function declaration.
use super::*;
use super::ident::MaybeOptionalIdentParser;
use super::*;
#[parser]
impl<'a, I: Input> Parser<'a, I> {
@ -212,38 +212,37 @@ impl<'a, I: Input> Parser<'a, I> {
if eat!('*') {
let span_of_gen = span!(start);
let key = self.parse_prop_name()?;
return self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
None,
Some(span_of_gen),
).map(|function| ClassMethod {
span: span!(start),
static_token,
key,
function,
kind: ClassMethodKind::Method,
});
return self
.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
None,
Some(span_of_gen),
)
.map(|function| ClassMethod {
span: span!(start),
static_token,
key,
function,
kind: ClassMethodKind::Method,
});
}
// Handle static(){}
if let Some(static_token) = static_token {
if is!('(') {
return self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
None,
None,
).map(|function| ClassMethod {
span: span!(start),
static_token: None,
key: PropName::Ident(Ident {
span: static_token,
sym: js_word!("static"),
}),
function,
kind: ClassMethodKind::Method,
});
return self
.parse_fn_args_body(start, Parser::parse_unique_formal_params, None, None)
.map(|function| ClassMethod {
span: span!(start),
static_token: None,
key: PropName::Ident(Ident {
span: static_token,
sym: js_word!("static"),
}),
function,
kind: ClassMethodKind::Method,
});
}
}
@ -251,9 +250,8 @@ impl<'a, I: Input> Parser<'a, I> {
// Handle `a(){}` (and async(){} / get(){} / set(){})
if is!('(') {
debug_assert_eq!(static_token, None);
return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, None, None)
return self
.parse_fn_args_body(start, Parser::parse_unique_formal_params, None, None)
.map(|function| ClassMethod {
span: span!(start),
static_token,
@ -277,7 +275,8 @@ impl<'a, I: Input> Parser<'a, I> {
let key = self.parse_prop_name()?;
return match ident.sym {
js_word!("get") => self.parse_fn_args_body(start, |_| Ok(vec![]), None, None)
js_word!("get") => self
.parse_fn_args_body(start, |_| Ok(vec![]), None, None)
.map(|function| ClassMethod {
span: span!(start),
static_token,
@ -285,30 +284,34 @@ impl<'a, I: Input> Parser<'a, I> {
function,
kind: ClassMethodKind::Getter,
}),
js_word!("set") => self.parse_fn_args_body(
start,
|p| p.parse_formal_param().map(|pat| vec![pat]),
None,
None,
).map(|function| ClassMethod {
span: span!(start),
key,
static_token,
function,
kind: ClassMethodKind::Setter,
}),
js_word!("async") => self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
Some(ident.span),
None,
).map(|function| ClassMethod {
span: span!(start),
static_token,
key,
function,
kind: ClassMethodKind::Method,
}),
js_word!("set") => {
self.parse_fn_args_body(
start,
|p| p.parse_formal_param().map(|pat| vec![pat]),
None,
None,
).map(|function| ClassMethod {
span: span!(start),
key,
static_token,
function,
kind: ClassMethodKind::Setter,
})
}
js_word!("async") => {
self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
Some(ident.span),
None,
).map(|function| ClassMethod {
span: span!(start),
static_token,
key,
function,
kind: ClassMethodKind::Method,
})
}
_ => unreachable!(),
};
}

View File

@ -1,6 +1,6 @@
use super::*;
use super::pat::PatType;
use super::util::ExprExt;
use super::*;
use swc_common::Spanned;
mod ops;
@ -110,7 +110,8 @@ impl<'a, I: Input> Parser<'a, I> {
let _ = cur!();
let start = cur_pos!();
let can_be_arrow = self.state
let can_be_arrow = self
.state
.potential_arrow_start
.map(|s| s == cur_pos!())
.unwrap_or(false);
@ -120,9 +121,10 @@ impl<'a, I: Input> Parser<'a, I> {
}
// Handle async function expression
if { is!("async") } && { peeked_is!("function") } && {
!self.input.has_linebreak_between_cur_and_peeked()
} {
if { is!("async") }
&& { peeked_is!("function") }
&& { !self.input.has_linebreak_between_cur_and_peeked() }
{
return self.parse_async_fn_expr();
}

View File

@ -1,6 +1,6 @@
//! Parser for unary operations and binary operations.
use super::*;
use super::util::ExprExt;
use super::*;
use swc_common::Spanned;
#[parser]

View File

@ -46,10 +46,12 @@ fn arrow_assign() {
expr("a = b => false"),
box Expr::Assign(AssignExpr {
span,
left: PatOrExpr::Pat(box Ident {
span,
sym: "a".into(),
}.into()),
left: PatOrExpr::Pat(
box Ident {
span,
sym: "a".into(),
}.into()
),
op: op!("="),
right: expr("b => false"),
})
@ -116,12 +118,10 @@ fn arrow_fn() {
span,
is_async: false,
is_generator: false,
params: vec![
Pat::Ident(Ident {
span,
sym: "a".into(),
}),
],
params: vec![Pat::Ident(Ident {
span,
sym: "a".into(),
})],
body: BlockStmtOrExpr::Expr(expr("1")),
})
);
@ -134,15 +134,13 @@ fn arrow_fn_rest() {
span,
is_async: false,
is_generator: false,
params: vec![
Pat::Rest(RestPat {
dot3_token: span,
pat: box Pat::Ident(Ident {
span,
sym: "a".into(),
}),
params: vec![Pat::Rest(RestPat {
dot3_token: span,
pat: box Pat::Ident(Ident {
span,
sym: "a".into(),
}),
],
})],
body: BlockStmtOrExpr::Expr(expr("1")),
})
);
@ -155,12 +153,10 @@ fn arrow_fn_no_paren() {
span,
is_async: false,
is_generator: false,
params: vec![
Pat::Ident(Ident {
span,
sym: "a".into(),
}),
],
params: vec![Pat::Ident(Ident {
span,
sym: "a".into(),
})],
body: BlockStmtOrExpr::Expr(expr("1")),
})
);

View File

@ -1,7 +1,7 @@
use Context;
use lexer::{Input, Lexer};
use swc_common::{BytePos, Span, DUMMY_SP};
use token::*;
use Context;
/// This struct is responsible for managing current token and peeked token.
pub(super) struct ParserInput<'a, I: Input> {

View File

@ -9,7 +9,7 @@ macro_rules! unexpected {
///
/// Returns bool.
macro_rules! is {
($p:expr, BindingIdent) => {{
($p:expr,BindingIdent) => {{
let ctx = $p.ctx();
match cur!($p) {
Ok(&Word(ref w)) => !ctx.is_reserved_word(&w.clone().into()),
@ -17,7 +17,7 @@ macro_rules! is {
}
}};
($p:expr, IdentRef) => {{
($p:expr,IdentRef) => {{
let ctx = $p.ctx();
match cur!($p) {
Ok(&Word(ref w)) => !ctx.is_reserved_word(&w.clone().into()),
@ -25,19 +25,20 @@ macro_rules! is {
}
}};
($p:expr, IdentName) => {{
($p:expr,IdentName) => {{
match cur!($p) {
Ok(&Word(..)) => true,
_ => false,
}
}};
($p:expr, ';') => {{
$p.input.is(&Token::Semi) || eof!($p) || is!($p, '}')
($p:expr,';') => {{
$p.input.is(&Token::Semi)
|| eof!($p)
|| is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
($p:expr, $t:tt) => {
$p.input.is(&tok!($t))
};
@ -45,7 +46,9 @@ macro_rules! is {
/// Returns true on eof.
macro_rules! eof {
($p:expr) => { cur!($p).is_err() };
($p:expr) => {
cur!($p).is_err()
};
}
macro_rules! peeked_is {
@ -68,7 +71,11 @@ macro_rules! assert_and_bump {
($p:expr, $t:tt) => {{
const TOKEN: &Token = &tok!($t);
if !$p.input.is(TOKEN) {
unreachable!("assertion failed: expected {:?}, got {:?}", TOKEN, $p.input.cur());
unreachable!(
"assertion failed: expected {:?}, got {:?}",
TOKEN,
$p.input.cur()
);
}
bump!($p);
}};
@ -79,9 +86,11 @@ macro_rules! assert_and_bump {
/// Returns bool if token is static, and Option<Token>
/// if token has data like string.
macro_rules! eat {
($p:expr, ';') => {{
($p:expr,';') => {{
debug!($p.session.logger, "eat(';'): cur={:?}", cur!($p));
$p.input.eat(&Token::Semi) || eof!($p) || is!($p, '}')
$p.input.eat(&Token::Semi)
|| eof!($p)
|| is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
@ -133,7 +142,7 @@ macro_rules! cur {
let pos = $p.input.last_pos();
let last = Span::new(pos, pos, Default::default());
let is_err_token = match $p.input.cur() {
Some(&$crate::token::Token::Error(..)) => { true },
Some(&$crate::token::Token::Error(..)) => true,
_ => false,
};
if is_err_token {
@ -196,11 +205,13 @@ macro_rules! cur_pos {
($p:expr) => {{
let pos = $p.input.cur_pos();
pos
}}
}};
}
macro_rules! last_pos {
($p:expr) => { $p.input.prev_span().hi() };
($p:expr) => {
$p.input.prev_span().hi()
};
}
macro_rules! return_if_arrow {
@ -214,10 +225,10 @@ macro_rules! return_if_arrow {
// None => false
// };
// if is_cur {
match *$expr {
Expr::Arrow{..} => return Ok($expr),
_ => {},
}
match *$expr {
Expr::Arrow { .. } => return Ok($expr),
_ => {}
}
// }
}};
}
@ -228,8 +239,11 @@ macro_rules! span {
let start: ::swc_common::BytePos = $start;
let end: ::swc_common::BytePos = last_pos!($p);
if cfg!(debug_assertions) && start > end {
unreachable!("assertion failed: (span.start <= span.end).
start = {}, end = {}", start.0, end.0)
unreachable!(
"assertion failed: (span.start <= span.end).
start = {}, end = {}",
start.0, end.0
)
}
::swc_common::Span::new(start, end, Default::default())
}};

View File

@ -2,7 +2,6 @@
#![deny(non_snake_case)]
use self::input::ParserInput;
use self::util::ParseObject;
use {Context, Session};
use ast::*;
use error::SyntaxError;
use lexer::Input;
@ -10,19 +9,20 @@ use lexer::Lexer;
use parser_macros::parser;
use std::ops::{Deref, DerefMut};
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use swc_common::errors::DiagnosticBuilder;
use swc_common::{BytePos, Span};
use token::*;
use {Context, Session};
#[macro_use]
mod macros;
mod class_and_fn;
mod object;
mod expr;
mod ident;
mod stmt;
mod pat;
pub mod input;
mod object;
mod pat;
mod stmt;
mod util;
pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
@ -62,7 +62,6 @@ impl<'a, I: Input> Parser<'a, I> {
}
pub fn parse_module(&mut self) -> PResult<'a, Module> {
let start = cur_pos!();
//TOOD: parse() -> PResult<'a, Program>
let ctx = Context {
module: true,
@ -72,6 +71,7 @@ impl<'a, I: Input> Parser<'a, I> {
// module code is always in strict mode
self.set_ctx(ctx);
let start = cur_pos!();
self.parse_block_body(true, true, None).map(|body| Module {
span: span!(start),
body,
@ -86,7 +86,17 @@ impl<'a, I: Input> Parser<'a, I> {
#[cfg(test)]
pub fn test_parser<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Parser<::FileMapInput>) -> Ret,
F: for<'a> FnOnce(&'a mut Parser<'a, ::FileMapInput>) -> Ret,
{
::with_test_sess(s, |sess, input| f(&mut Parser::new(sess, input)))
}
#[test]
fn module_legacy() {
test_parser("<!--", |f| {
let res = f.parse_module();
assert!(f.ctx().module);
assert!(f.ctx().strict);
let _ = res.expect_err("!").cancel();
});
}

View File

@ -62,7 +62,8 @@ impl<'a, I: Input> Parser<'a, I> {
},
LBracket => {
bump!();
let expr = self.include_in_expr(true)
let expr = self
.include_in_expr(true)
.parse_assignment_expr()
.map(PropName::Computed)?;
expect!(']');
@ -92,17 +93,19 @@ impl<'a, I: Input> ParseObject<'a, (Box<Expr>)> for Parser<'a, I> {
let span_of_gen = span!(start);
let name = self.parse_prop_name()?;
return self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
None,
Some(span_of_gen),
).map(|function| {
Prop::Method(MethodProp {
key: name,
function,
})
});
return self
.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
None,
Some(span_of_gen),
)
.map(|function| {
Prop::Method(MethodProp {
key: name,
function,
})
});
}
let key = self.parse_prop_name()?;
@ -118,7 +121,8 @@ impl<'a, I: Input> ParseObject<'a, (Box<Expr>)> for Parser<'a, I> {
// Handle `a(){}` (and async(){} / get(){} / set(){})
if is!('(') {
return self.parse_fn_args_body(start, Parser::parse_unique_formal_params, None, None)
return self
.parse_fn_args_body(start, Parser::parse_unique_formal_params, None, None)
.map(|function| Prop::Method(MethodProp { key, function }));
}
@ -151,7 +155,8 @@ impl<'a, I: Input> ParseObject<'a, (Box<Expr>)> for Parser<'a, I> {
let key = self.parse_prop_name()?;
return match ident.sym {
js_word!("get") => self.parse_fn_args_body(start, |_| Ok(vec![]), None, None)
js_word!("get") => self
.parse_fn_args_body(start, |_| Ok(vec![]), None, None)
.map(|Function { body, .. }| {
Prop::Getter(GetterProp {
span: span!(start),
@ -159,26 +164,30 @@ impl<'a, I: Input> ParseObject<'a, (Box<Expr>)> for Parser<'a, I> {
body,
})
}),
js_word!("set") => self.parse_fn_args_body(
start,
|p| p.parse_formal_param().map(|pat| vec![pat]),
None,
None,
).map(|Function { params, body, .. }| {
assert_eq!(params.len(), 1);
Prop::Setter(SetterProp {
span: span!(start),
key,
body,
param: params.into_iter().next().unwrap(),
js_word!("set") => {
self.parse_fn_args_body(
start,
|p| p.parse_formal_param().map(|pat| vec![pat]),
None,
None,
).map(|Function { params, body, .. }| {
assert_eq!(params.len(), 1);
Prop::Setter(SetterProp {
span: span!(start),
key,
body,
param: params.into_iter().next().unwrap(),
})
})
}),
js_word!("async") => self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
Some(ident.span),
None,
).map(|function| Prop::Method(MethodProp { key, function })),
}
js_word!("async") => {
self.parse_fn_args_body(
start,
Parser::parse_unique_formal_params,
Some(ident.span),
None,
).map(|function| Prop::Method(MethodProp { key, function }))
}
_ => unreachable!(),
};
}

View File

@ -1,6 +1,6 @@
//! 13.3.3 Destructuring Binding Patterns
use super::*;
use super::util::ExprExt;
use super::*;
use std::iter;
use swc_common::Spanned;
@ -286,10 +286,8 @@ impl<'a, I: Input> Parser<'a, I> {
Prop::KeyValue(KeyValueProp { key, value }) => {
Ok(ObjectPatProp::KeyValue(KeyValuePatProp {
key,
value: box self.reparse_expr_as_pat(
pat_ty.element(),
value,
)?,
value: box self
.reparse_expr_as_pat(pat_ty.element(), value)?,
}))
}
Prop::Assign(AssignProp { key, value }) => {

View File

@ -1,5 +1,5 @@
use super::*;
use super::pat::PatType;
use super::*;
use swc_common::Spanned;
mod module_item;
@ -184,7 +184,8 @@ impl<'a, I: Input> Parser<'a, I> {
}
// Handle async function foo() {}
if is!("async") && peeked_is!("function")
if is!("async")
&& peeked_is!("function")
&& !self.input.has_linebreak_between_cur_and_peeked()
{
return self.parse_async_fn_decl().map(From::from);

View File

@ -141,7 +141,8 @@ impl<'a, I: Input> Parser<'a, I> {
if eat!("default") {
let decl = if is!("class") {
self.parse_default_class()?
} else if is!("async") && peeked_is!("function")
} else if is!("async")
&& peeked_is!("function")
&& !self.input.has_linebreak_between_cur_and_peeked()
{
self.parse_default_async_fn()?
@ -158,7 +159,8 @@ impl<'a, I: Input> Parser<'a, I> {
let decl = if is!("class") {
self.parse_class_decl()?
} else if is!("async") && peeked_is!("function")
} else if is!("async")
&& peeked_is!("function")
&& !self.input.has_linebreak_between_cur_and_peeked()
{
self.parse_async_fn_decl()?

View File

@ -194,13 +194,18 @@ pub(crate) struct TokenAndSpan {
#[derive(Kind, Clone, PartialEq, Eq, Hash, Fold)]
#[kind(functions(starts_expr = "bool", before_expr = "bool"))]
pub enum Word {
#[kind(delegate)] Keyword(Keyword),
#[kind(delegate)]
Keyword(Keyword),
#[kind(starts_expr)] Null,
#[kind(starts_expr)] True,
#[kind(starts_expr)] False,
#[kind(starts_expr)]
Null,
#[kind(starts_expr)]
True,
#[kind(starts_expr)]
False,
#[kind(starts_expr)] Ident(JsWord),
#[kind(starts_expr)]
Ident(JsWord),
}
impl From<JsWord> for Word {
@ -341,26 +346,33 @@ pub enum Keyword {
Await,
Break,
#[kind(before_expr)] Case,
#[kind(before_expr)]
Case,
Catch,
Continue,
Debugger,
#[kind(before_expr)] Default_,
#[kind(before_expr)] Do,
#[kind(before_expr)] Else,
#[kind(before_expr)]
Default_,
#[kind(before_expr)]
Do,
#[kind(before_expr)]
Else,
Finally,
For,
#[kind(starts_expr)] Function,
#[kind(starts_expr)]
Function,
If,
#[kind(before_expr)] Return,
#[kind(before_expr)]
Return,
Switch,
#[kind(before_expr, starts_expr)] Throw,
#[kind(before_expr, starts_expr)]
Throw,
Try,
Var,
@ -369,29 +381,40 @@ pub enum Keyword {
While,
With,
#[kind(before_expr, starts_expr)] New,
#[kind(starts_expr)] This,
#[kind(starts_expr)] Super,
#[kind(before_expr, starts_expr)]
New,
#[kind(starts_expr)]
This,
#[kind(starts_expr)]
Super,
#[kind(starts_expr)] Class,
#[kind(starts_expr)]
Class,
#[kind(before_expr)] Extends,
#[kind(before_expr)]
Extends,
Export,
#[kind(starts_expr)] Import,
#[kind(starts_expr)]
Import,
/// Spec says this might be identifier.
#[kind(before_expr, starts_expr)]
Yield,
#[kind(before_expr)] In,
#[kind(before_expr)] InstanceOf,
#[kind(before_expr)]
In,
#[kind(before_expr)]
InstanceOf,
#[kind(before_expr, starts_expr)] TypeOf,
#[kind(before_expr, starts_expr)]
TypeOf,
#[kind(before_expr, starts_expr)] Void,
#[kind(before_expr, starts_expr)]
Void,
#[kind(before_expr, starts_expr)] Delete,
#[kind(before_expr, starts_expr)]
Delete,
}
impl Keyword {

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/11d61dbd7c1fbd1b.js:1:34
|
1 | function hello() { 'use strict'; 021; }

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/4ce3c0a393c624d5.js:1:15
|
1 | 'use strict'; 0123

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/66e667cc2b718770.js:1:37
|
1 | function hello() { 'use strict'; ({ 021: 42 }); }

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/80bfa9f27278bbba.js:1:66
|
1 | "use strict";function foo(){"use strict";}function bar(){var v = 015}

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/938db8c9f82c8cb5.module.js:1:1
|
1 | 01

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/ab35979364766bf0.js:1:15
|
1 | 'use strict'; 07

View File

@ -1,6 +1,7 @@
error: Unexpected token
error: Legacy comments cannot be used in module code
--> $DIR/tests/test262-parser/fail/d0804b4856dbb6be.module.js:2:1
|
2 | <!--
| ^
2 | / <!--
3 | | }
| |_

View File

@ -1,6 +1,6 @@
error: Unexpected token
error: Legacy comments cannot be used in module code
--> $DIR/tests/test262-parser/fail/d3ac25ddc7ba9779.module.js:1:1
|
1 | <!--
| ^
| ^^^^

View File

@ -1,4 +1,4 @@
error: Legacy decimal escape is not permitted in strict mode
error: Legacy octal escape is not permitted in strict mode
--> $DIR/tests/test262-parser/fail/f6924dd818b18733.js:1:15
|
1 | 'use strict'; 01

View File

@ -1,5 +1,4 @@
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(specialization)]
#![feature(test)]
@ -9,17 +8,17 @@ extern crate swc_ecma_parser;
extern crate test;
extern crate testing;
use std::env;
use std::fs::File;
use std::fs::read_dir;
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use swc_common::{Fold, FoldWith};
use swc_common::FileName;
use swc_common::Span;
use swc_ecma_parser::{FileMapInput, PResult, Parser, Session};
use swc_common::{Fold, FoldWith};
use swc_ecma_parser::ast::*;
use test::{test_main, Options, TestDesc, TestDescAndFn, TestFn, TestName};
use swc_ecma_parser::{FileMapInput, PResult, Parser, Session};
use test::ShouldPanic::No;
use test::{test_main, Options, TestDesc, TestDescAndFn, TestFn, TestName};
use testing::NormalizedOutput;
const IGNORED_PASS_TESTS: &[&str] = &[
@ -79,7 +78,7 @@ fn add_test<F: FnOnce() + Send + 'static>(
tests.push(TestDescAndFn {
desc: TestDesc {
name: TestName::DynTestName(name),
ignore: ignore,
ignore,
should_panic: No,
allow_fail: false,
},
@ -111,8 +110,8 @@ fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
eprintln!("Loading tests from {}", root.display());
const TYPES: &[&str] = &[
"fail" /* TODO
* "early" */
"fail", /* TODO
* "early" */
];
for err_type in TYPES {
@ -162,10 +161,12 @@ fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
parse_script(&path, &input).expect_err("should fail, but parsed as")
};
if err.compare_to_file(format!(
"{}.stderr",
error_reference_dir.join(file_name).display()
)).is_err()
if err
.compare_to_file(format!(
"{}.stderr",
error_reference_dir.join(file_name).display()
))
.is_err()
{
panic!()
}
@ -291,24 +292,15 @@ where
}
}
fn normalize<T>(mut t: T) -> T
fn normalize<T>(t: T) -> T
where
Normalizer: Fold<T>,
{
loop {
let mut n = Normalizer {
did_something: false,
};
t = n.fold(t);
if !n.did_something {
return t;
}
}
let mut n = Normalizer;
n.fold(t)
}
struct Normalizer {
did_something: bool,
}
struct Normalizer;
impl Fold<Span> for Normalizer {
fn fold(&mut self, _: Span) -> Span {
Span::default()
@ -325,7 +317,7 @@ impl Fold<Str> for Normalizer {
}
impl Fold<Expr> for Normalizer {
fn fold(&mut self, e: Expr) -> Expr {
let e = e.fold_with(self);
let e = e.fold_children(self);
match e {
Expr::Paren(ParenExpr { expr, .. }) => *expr,
@ -333,24 +325,19 @@ impl Fold<Expr> for Normalizer {
callee,
args: None,
span,
}) => {
self.did_something = true;
Expr::New(NewExpr {
span,
callee: self.fold(callee),
args: Some(vec![]),
})
}
}) => Expr::New(NewExpr {
span,
callee,
args: Some(vec![]),
}),
// Flatten comma expressions.
Expr::Seq(SeqExpr { exprs, span }) => {
let mut exprs = self.fold(exprs);
Expr::Seq(SeqExpr { mut exprs, span }) => {
let need_work = exprs.iter().any(|n| match **n {
Expr::Seq(..) => true,
_ => false,
});
if need_work {
self.did_something = true;
exprs = exprs.into_iter().fold(vec![], |mut v, e| {
match *e {
Expr::Seq(SeqExpr { exprs, .. }) => v.extend(exprs),
@ -368,24 +355,20 @@ impl Fold<Expr> for Normalizer {
impl Fold<PropName> for Normalizer {
fn fold(&mut self, n: PropName) -> PropName {
let n = n.fold_children(self);
match n {
PropName::Ident(Ident { sym, .. }) => {
self.did_something = true;
PropName::Str(Str {
span: Default::default(),
value: sym,
has_escape: false,
})
}
PropName::Num(num) => {
self.did_something = true;
PropName::Str(Str {
span: Default::default(),
value: num.to_string().into(),
has_escape: false,
})
}
_ => n.fold_children(self),
PropName::Ident(Ident { sym, .. }) => PropName::Str(Str {
span: Default::default(),
value: sym,
has_escape: false,
}),
PropName::Num(num) => PropName::Str(Str {
span: Default::default(),
value: num.to_string().into(),
has_escape: false,
}),
_ => n,
}
}
}

View File

@ -8,12 +8,12 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../../macros/common" }
proc-macro2 = "0.2"
proc-macro2 = "0.4.4"
[dependencies.syn]
version = "0.12"
version = "0.14.1"
features = ["fold"]
[dependencies.quote]
version = "0.4"
version = "0.6.3"

View File

@ -17,11 +17,11 @@ fn get_joinned_span(t: &ToTokens) -> Span {
let (mut first, mut last) = (None, None);
for tt in tts {
match first {
None => first = Some(tt.span),
None => first = Some(tt.span()),
_ => {}
}
last = Some(tt.span);
last = Some(tt.span());
}
let cs = Span::call_site();
first.unwrap_or(cs).join(last.unwrap_or(cs)).unwrap_or(cs)
@ -58,24 +58,22 @@ impl Fold for InjectSelf {
}
}
match i.method.as_ref() {
"parse_with" | "spanned" => {
//TODO
let parser = get_parser_arg(&i);
return fold::fold_expr_method_call(
&mut InjectSelf {
parser: Some(parser),
},
i,
);
}
_ => {}
};
if i.method == "parse_with" || i.method == "spanned" {
//TODO
let parser = get_parser_arg(&i);
return fold::fold_expr_method_call(
&mut InjectSelf {
parser: Some(parser),
},
i,
);
}
fold::fold_expr_method_call(self, i)
}
fn fold_method_sig(&mut self, i: MethodSig) -> MethodSig {
self.parser = i.decl
self.parser = i
.decl
.inputs
.first()
.map(Pair::into_value)
@ -96,8 +94,11 @@ impl Fold for InjectSelf {
fn fold_macro(&mut self, i: Macro) -> Macro {
let parser = match self.parser {
Some(s) => s,
_ => return i,
Some(ref s) => s.clone(),
_ => {
// If we are not in parser, don't do anything.
return i;
}
};
let name = i.path.dump().to_string();
@ -108,7 +109,8 @@ impl Fold for InjectSelf {
"println" | "print" | "format" | "assert" | "assert_eq" | "assert_ne"
| "debug_assert" | "debug_assert_eq" | "debug_assert_ne" => {
let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
args = args.into_pairs()
args = args
.into_pairs()
.map(|el| el.map_item(|expr| self.fold_expr(expr)))
.collect();
return Macro {
@ -143,7 +145,8 @@ impl Fold for InjectSelf {
quote_spanned!(span => #parser).into()
} else {
let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
let args = args.into_pairs()
let args = args
.into_pairs()
.map(|el| el.map_item(|expr| self.fold_expr(expr)))
.map(|arg| arg.dump())
.flat_map(|t| TokenStream::from(t));

View File

@ -2,7 +2,8 @@
//! It lives here because it's not a generic purpose macro (at all).
//!
//! This can't be implemented with macro_rule! because
//! rust does not support token munching (destructing `$b:block` into `{ $($t:tt)* }`).
//! rust does not support token munching (destructing `$b:block` into `{
//! $($t:tt)* }`).
//!
//!

View File

@ -8,13 +8,13 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = "0.1"
proc-macro2 = { version = "0.2", features = ["nightly"] }
quote = "0.4"
darling = "0.3"
pmutil = "0.2"
proc-macro2 = { version = "0.4.4", features = ["nightly"] }
quote = "0.6"
darling = { git = "https://github.com/hcpl/darling.git", branch = "update-deps" }
[dependencies.syn]
version = "0.12"
version = "0.14.1"
features = ["derive", "fold", "parsing", "printing"]

View File

@ -27,7 +27,7 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
.map(|f| f.ty.clone())
.map(normalize_type_for_bound)
.map(|ty| {
Quote::new(Span::def_site())
Quote::new(def_site::<Span>())
.quote_with(smart_quote!(
Vars { Type: &ty },
(Type: swc_common::FoldWith<__Folder>)
@ -50,26 +50,29 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
.map(|binding| {
// This closure will not be called for unit-like struct.
let field_name: Tokens = binding
let field_name: TokenStream = binding
.field()
.ident
.as_ref()
.map(|s| s.dump())
.unwrap_or_else(|| {
// Use index
// call_site is important for unexported tuple fields.
Index {
index: binding.idx() as _,
span: def_site(),
span: call_site(),
}.dump()
});
let value = match should_skip_field(binding.field()) {
true => Quote::new(Span::def_site()).quote_with(smart_quote!(
true => Quote::new(def_site::<Span>()).quote_with(smart_quote!(
Vars {
binded_field: binding.name(),
},
{ binded_field }
)),
false => Quote::new(Span::def_site()).quote_with(smart_quote!(
false => Quote::new(def_site::<Span>()).quote_with(smart_quote!(
Vars {
FieldType: &binding.field().ty,
binded_field: binding.name(),
@ -78,7 +81,7 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
)),
};
let v = Quote::new(Span::def_site())
let v = Quote::new(def_site::<Span>())
.quote_with(smart_quote!(
Vars { field_name, value },
(field_name: value)
@ -99,7 +102,7 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
.collect();
let body = match *v.data() {
// Handle unit like structs separately
// Handle unit-like structs separately
Fields::Unit => box Quote::new(Span::def_site())
.quote_with(smart_quote!(Vars { Name: qual_name }, {
{
@ -124,15 +127,18 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
Arm {
body,
attrs: v.attrs()
attrs: v
.attrs()
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
pats: vec![Element::End(pat)].into_iter().collect(),
guard: None,
rocket_token: def_site(),
fat_arrow_token: def_site(),
comma: Some(def_site()),
leading_vert: None,
}
})
.collect();

View File

@ -27,7 +27,7 @@ pub fn derive(
}
let field = unnamed.into_iter().next().unwrap();
let from_impl = Quote::new(Span::def_site().located_at(Span::def_site()))
let from_impl = Quote::new(def_site::<Span>())
.quote_with(smart_quote!(
Vars {
VariantType: field.ty,

View File

@ -38,19 +38,21 @@ pub fn derive_spanned(input: proc_macro::TokenStream) -> proc_macro::TokenStream
pub fn derive_from_variant(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse::<DeriveInput>(input).expect("failed to parse input as DeriveInput");
let item = self::from_variant::derive(input)
.into_iter()
.fold(Tokens::new(), |mut t, item| {
let item = self::from_variant::derive(input).into_iter().fold(
TokenStream::new(),
|mut t, item| {
item.to_tokens(&mut t);
t
});
},
);
print_item("derive(FromVariant)", item.dump())
}
/// Alias for
/// `#[derive(Spanned, Fold, Clone, Debug, PartialEq)]` for a struct and
/// `#[derive(Spanned, Fold, Clone, Debug, PartialEq, FromVariant)]` for an enum.
/// `#[derive(Spanned, Fold, Clone, Debug, PartialEq, FromVariant)]` for an
/// enum.
#[proc_macro_attribute]
pub fn ast_node(
args: proc_macro::TokenStream,
@ -62,10 +64,8 @@ pub fn ast_node(
let input: DeriveInput = parse(input).expect("failed to parse input as a DeriveInput");
// If we use call_site with proc_macro feature enabled,
// only attributes for first derive works.
// See https://github.com/rust-lang/rust/issues/46489
let mut item = Quote::new(Span::def_site().located_at(Span::call_site()));
// we should use call_site
let mut item = Quote::new(Span::call_site());
item = match input.data {
Data::Enum(..) => item.quote_with(smart_quote!(Vars { input }, {
#[derive(FromVariant, Spanned, Fold, Clone, Debug, PartialEq)]
@ -82,11 +82,10 @@ pub fn ast_node(
/// Workarounds https://github.com/rust-lang/rust/issues/44925
fn print_item<T: Into<TokenStream>>(name: &'static str, item: T) -> proc_macro::TokenStream {
let item = Quote::new(Span::def_site().located_at(Span::call_site())).quote_with(
smart_quote!(Vars { item: item.into() }, {
let item =
Quote::new(def_site::<Span>()).quote_with(smart_quote!(Vars { item: item.into() }, {
extern crate swc_common;
item
}),
);
}));
print(name, item)
}

View File

@ -28,15 +28,17 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
Arm {
body,
attrs: v.attrs()
attrs: v
.attrs()
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
pats: vec![Element::End(pat)].into_iter().collect(),
guard: None,
rocket_token: def_site(),
fat_arrow_token: def_site(),
comma: Some(def_site()),
leading_vert: None,
}
})
.collect();
@ -45,13 +47,13 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
attrs: Default::default(),
match_token: def_site(),
brace_token: def_site(),
expr: box Quote::new(Span::def_site())
expr: box Quote::new(def_site::<Span>())
.quote_with(smart_quote!(Vars {}, { self }))
.parse(),
arms,
});
Quote::new(Span::def_site())
Quote::new(def_site::<Span>())
.quote_with(smart_quote!(
Vars {
Type: &input.ident,
@ -72,7 +74,7 @@ pub fn derive(input: DeriveInput) -> ItemImpl {
fn make_body_for_variant(v: &VariantBinder, bindings: Vec<BindedField>) -> Box<Expr> {
/// `swc_common::Spanned::span(#field)`
fn simple_field(field: &ToTokens) -> Box<Expr> {
box Quote::new(Span::def_site())
box Quote::new(def_site::<Span>())
.quote_with(smart_quote!(Vars { field }, {
swc_common::Spanned::span(field)
}))
@ -115,7 +117,13 @@ fn make_body_for_variant(v: &VariantBinder, bindings: Vec<BindedField>) -> Box<E
if !has_any_span_attr {
let span_field = bindings
.iter()
.find(|b| Some("span") == b.field().ident.as_ref().map(|ident| ident.as_ref()))
.find(|b| {
b.field()
.ident
.as_ref()
.map(|ident| ident == "span")
.unwrap_or(false)
})
.unwrap_or_else(|| {
panic!(
"#[derive(Spanned)]: cannot determine span field to use for {}",
@ -138,7 +146,7 @@ fn make_body_for_variant(v: &VariantBinder, bindings: Vec<BindedField>) -> Box<E
match (lo, hi) {
(Some(&(ref lo_field, _)), Some(&(ref hi_field, _))) => {
// Create a new span from lo_field.lo(), hi_field.hi()
box Quote::new(Span::def_site())
box Quote::new(def_site::<Span>())
.quote_with(smart_quote!(Vars { lo_field, hi_field }, {
swc_common::Spanned::span(lo_field)
.with_hi(swc_common::Spanned::span(hi_field).hi())

View File

@ -8,5 +8,4 @@ use ast_node::*;
pub struct Struct {}
#[derive(Debug, FromVariant, Fold)]
pub enum Enum {
}
pub enum Enum {}

View File

@ -15,24 +15,24 @@ impl<F> FoldWith<F> for PanicOnFold {
#[test]
fn ignore_struct_named_field() {
#[derive(Fold, Debug, Clone, Copy, Default, PartialEq, Eq)]
#[derive(Fold, Debug, Default, PartialEq)]
struct Foo {
#[fold(ignore)]
pub named: PanicOnFold,
named: PanicOnFold,
}
Foo::default().fold_with(&mut MyFolder);
}
#[test]
fn ignore_struct_unnamed_field() {
#[derive(Fold, Debug, Clone, Copy, Default, PartialEq, Eq)]
#[derive(Fold, Debug, Default, PartialEq)]
struct Bar(#[fold(ignore)] PanicOnFold);
Bar::default().fold_with(&mut MyFolder);
}
#[test]
fn ignore_enum_unnamed_field() {
#[derive(Fold, Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Fold, Debug, PartialEq)]
enum A {
Field(#[fold(ignore)] PanicOnFold),
}
@ -42,7 +42,7 @@ fn ignore_enum_unnamed_field() {
#[test]
fn ignore_enum_named_field() {
#[derive(Fold, Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Fold, Debug, PartialEq)]
enum A {
Field {
#[fold(ignore)]

View File

@ -4,10 +4,10 @@ version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
pmutil = "0.1"
proc-macro2 = "0.2"
quote = "0.4"
pmutil = "0.2"
proc-macro2 = "0.4.4"
quote = "0.6.3"
[dependencies.syn]
version = "0.12"
version = "0.14.1"
features = ["derive", "visit", "parsing", "full", "printing", "extra-traits"]

View File

@ -32,13 +32,14 @@
//! -----
//!
//! Adopted from `synstructure`.
use def_site;
use is_attr_name;
use pmutil::prelude::*;
use proc_macro2::Span;
use quote::{ToTokens, Tokens};
use syn::*;
use quote::ToTokens;
use syn::punctuated::Pair;
use syn::token::{Mut, Ref};
use syn::*;
use syn_ext::PairExt;
/// Used to bind whole struct or enum.
@ -116,7 +117,7 @@ impl<'a> VariantBinder<'a> {
/// `EnumName::VariantName` for enum, and `StructName` for struct.
pub fn qual_path(&self) -> Path {
match self.enum_name {
Some(enum_name) => Quote::new(Span::def_site())
Some(enum_name) => Quote::new(def_site::<Span>())
.quote_with(smart_quote!(
Vars {
EnumName: enum_name,
@ -161,7 +162,9 @@ impl<'a> VariantBinder<'a> {
.enumerate()
.map(|(idx, f)| {
f.map_item(|f| {
let ident = f.ident
let ident = f
.ident
.clone()
.expect("field of struct-like variants should have name");
let binded_ident = ident.new_ident_with(|s| format!("{}{}", prefix, s));
@ -171,7 +174,8 @@ impl<'a> VariantBinder<'a> {
field: f,
});
FieldPat {
attrs: f.attrs
attrs: f
.attrs
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
@ -214,7 +218,7 @@ impl<'a> VariantBinder<'a> {
.map(|(idx, f)| {
f.map_item(|f| {
let binded_ident =
Span::def_site().new_ident(format!("{}{}", prefix, idx));
def_site::<Span>().new_ident(format!("{}{}", prefix, idx));
bindings.push(BindedField {
idx,
binded_ident: binded_ident.clone(),
@ -287,7 +291,7 @@ impl<'a> BindedField<'a> {
}
impl<'a> ToTokens for BindedField<'a> {
fn to_tokens(&self, t: &mut Tokens) {
fn to_tokens(&self, t: &mut TokenStream) {
self.binded_ident.to_tokens(t)
}
}

View File

@ -21,8 +21,8 @@ impl<'a> Derive<'a> {
if path.leading_colon.is_none() {
if let Some(seg) = path.segments.first() {
let id = seg.value().ident;
if self.params.contains(&id) {
let id = &seg.value().ident;
if self.params.contains(id) {
self.is_generic = true;
}
}
@ -53,7 +53,8 @@ impl<'a> Derive<'a> {
}
let mut vis = FieldVisitor {
params: self.input
params: self
.input
.generics
.params
.iter()
@ -101,12 +102,13 @@ impl<'a> Derive<'a> {
let bound = WherePredicate::Type(PredicateType {
lifetimes: None,
bounded_ty: self_ty,
colon_token: Default::default(),
colon_token: def_site(),
// `Trait` in `Self: Trait`
bounds: iter::once(Pair::End(TypeParamBound::Trait(TraitBound {
modifier: TraitBoundModifier::None,
lifetimes: None,
path: trait_,
paren_token: None,
}))).collect(),
});

View File

@ -1,9 +1,10 @@
use def_site;
use pmutil::ToTokensExt;
use quote::{ToTokens, Tokens};
use proc_macro2::TokenStream;
use quote::ToTokens;
use std::iter;
use syn::*;
use syn::punctuated::Pair;
use syn::*;
mod generics;
@ -47,7 +48,7 @@ impl<'a> Derive<'a> {
// Handle generic delcared on type.
let ty: Box<Type> = {
let (_, ty_generics, _) = input.generics.split_for_impl();
let mut t = Tokens::new();
let mut t = TokenStream::new();
input.ident.to_tokens(&mut t);
ty_generics.to_tokens(&mut t);
box parse(t.dump().into()).unwrap_or_else(|err| {

View File

@ -9,22 +9,23 @@ extern crate proc_macro2;
#[macro_use]
extern crate quote;
extern crate syn;
use pmutil::SpanExt;
use pmutil::synom_ext::FromSpan;
use pmutil::SpanExt;
use proc_macro2::Span;
use syn::*;
pub mod binder;
pub mod derive;
pub mod prelude;
mod syn_ext;
pub mod binder;
pub fn call_site<T: FromSpan>() -> T {
Span::call_site().as_token()
}
/// `Span::def_site().located_at(Span::call_site()).as_token()`
pub fn def_site<T: FromSpan>() -> T {
Span::def_site().as_token()
Span::def_site().located_at(Span::call_site()).as_token()
}
/// `attr` - tokens inside `#[]`. e.g. `derive(EqIgnoreSpan)`, ast_node

View File

@ -1,10 +1,10 @@
pub use super::{call_site, def_site, doc_str, is_attr_name, print};
pub use super::binder::{BindedField, Binder, VariantBinder};
pub use super::derive::Derive;
pub use super::syn_ext::{ItemImplExt, PairExt};
pub use super::{call_site, def_site, doc_str, is_attr_name, print};
pub use pmutil::prelude::*;
pub use proc_macro2::{Delimiter, Literal, Span, TokenNode, TokenStream, TokenTree};
pub use quote::{ToTokens, Tokens};
pub use syn::*;
pub use syn::punctuated::{Pair, Punctuated};
pub use proc_macro2::{Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree};
pub use quote::ToTokens;
pub use syn::punctuated::Pair as Element;
pub use syn::punctuated::{Pair, Punctuated};
pub use syn::*;

View File

@ -1,7 +1,7 @@
use def_site;
use pmutil::prelude::*;
use syn::*;
use syn::punctuated::Pair;
use syn::*;
/// Extension trait for `ItemImpl` (impl block).
pub trait ItemImplExt {

View File

@ -8,11 +8,11 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = "0.1"
proc-macro2 = "0.2"
pmutil = "0.2"
proc-macro2 = "0.4.4"
[dependencies.syn]
version = "0.12"
version = "0.14.1"
features = ["full", "parsing", "printing", "extra-traits"]

View File

@ -1,5 +1,5 @@
use common::prelude::*;
use input::*;
use swc_macros_common::prelude::*;
use util::is_bool;
pub fn expand(
@ -32,8 +32,8 @@ pub fn expand(
let used = attrs
.fns
.iter()
.map(|f| f.name)
.any(|fn_name| value.fn_name == fn_name || value.fn_name == "delegate");
.map(|f| &f.name)
.any(|fn_name| value.fn_name == *fn_name || value.fn_name == "delegate");
if !used {
panic!("Unknown function `{}` on variant {}", value.fn_name, v.name)
}
@ -46,7 +46,7 @@ pub fn expand(
.into_iter()
.map(|f| f.expand(&name, vis.clone(), &variants))
.map(ImplItem::Method)
.fold(Tokens::new(), |mut t, i| {
.fold(TokenStream::new(), |mut t, i| {
i.to_tokens(&mut t);
t
});
@ -76,7 +76,7 @@ impl FnDef {
default_value,
} = self;
let name_span = name.span;
let name_span = name.span();
let arms =
variants
@ -88,7 +88,8 @@ impl FnDef {
.bind("_", Some(call_site()), None);
let body = {
let value = match v.attrs
let value = match v
.attrs
.fn_values
.iter()
.find(|fn_val| fn_val.fn_name == name)
@ -105,7 +106,7 @@ impl FnDef {
.quote_with(smart_quote!(
Vars {
field,
method: name,
method: &name,
},
{ field.method() }
))
@ -139,15 +140,17 @@ impl FnDef {
Arm {
pats: vec![Element::End(pat)].into_iter().collect(),
body,
leading_vert: None,
// Forward cfg attributes.
attrs: v.attrs
attrs: v
.attrs
.extras
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
rocket_token: call_site(),
fat_arrow_token: call_site(),
comma: Some(call_site()),
guard: None,
}
@ -173,11 +176,10 @@ impl FnDef {
constness: None,
unsafety: None,
abi: None,
ident: name,
// fn (&self) -> ReturnTpe
decl: FnDecl {
fn_token: name.span.as_token(),
paren_token: name.span.as_token(),
fn_token: name.span().as_token(),
paren_token: name.span().as_token(),
inputs: vec![
// TODO
Element::End(FnArg::SelfRef(ArgSelfRef {
@ -192,6 +194,7 @@ impl FnDef {
generics: Default::default(),
variadic: None,
},
ident: name,
},
block: Block {

View File

@ -1,164 +1,163 @@
//! # Atributes on enum
//! ## functions
//! `#[kind(functions(name = "return_type"))]`
//!
//! ```rust
//! #[macro_use]
//! extern crate enum_kind;
//!
//! /// You can split attributes if you want.
//! #[derive(Kind)]
//! #[kind(functions(is_a = "bool", is_b = "bool"))]
//! #[kind(functions(is_a_or_b = "bool", num = "u8"))]
//! pub enum E {
//! #[kind(is_a, is_a_or_b, num = "1")]
//! A,
//! /// You can split attributes if you want.
//! #[kind(is_b)]
//! #[kind(is_a_or_b)]
//! #[kind(num = "2")]
//! B(u8),
//! /// Default value of bool is false if not specified and true if specified.
//! ///
//! /// Both struct like variant and tuple like variant are supported.
//! #[kind(num = "3")]
//! C {},
//! }
//! # fn main() {
//! assert!(E::A.is_a() && E::A.is_a_or_b() && !E::A.is_b());
//! assert_eq!(E::A.num(), 1);
//!
//! assert!(!E::B(0).is_a() && E::B(0).is_a_or_b() && E::B(0).is_b());
//! assert_eq!(E::B(0).num(), 2);
//!
//! assert!(!E::C{}.is_a() && !E::C{}.is_a_or_b() && !E::C{}.is_b());
//! assert_eq!(E::C{}.num(), 3);
//!
//!
//! # }
//! ```
//!
//! -----
//!
//! # Real usecase
//!
//! ```rust
//! #[macro_use]
//! extern crate enum_kind;
//!
//! #[derive(Kind, Debug, Clone, Eq, PartialEq, Hash)]
//! #[kind(function(precedence = "u8"))]
//! pub enum BinOpToken {
//! /// `==`
//! #[kind(precedence = "6")]
//! EqEq,
//! /// `!=`
//! #[kind(precedence = "6")]
//! NotEq,
//! /// `===`
//! #[kind(precedence = "6")]
//! EqEqEq,
//! /// `!==`
//! #[kind(precedence = "6")]
//! NotEqEq,
//! /// `<`
//! #[kind(precedence = "7")]
//! Lt,
//! /// `<=`
//! #[kind(precedence = "7")]
//! LtEq,
//! /// `>`
//! #[kind(precedence = "7")]
//! Gt,
//! #[kind(precedence = "7")]
//! /// `>=`
//! #[kind(precedence = "7")]
//! GtEq,
//! /// `<<`
//! #[kind(precedence = "8")]
//! LShift,
//! /// `>>`
//! #[kind(precedence = "8")]
//! RShift,
//! /// `>>>`
//! #[kind(precedence = "8")]
//! ZeroFillRShift,
//! /// `+`
//! #[kind(precedence = "9")]
//! Plus,
//! /// `-`
//! #[kind(precedence = "9")]
//! Minus,
//! /// `*`
//! #[kind(precedence = "10")]
//! Mul,
//! /// `/`
//! #[kind(precedence = "10")]
//! Div,
//! /// `%`
//! #[kind(precedence = "10")]
//! Mod,
//! /// `|`
//! #[kind(precedence = "3")]
//! BitOr,
//! /// `^`
//! #[kind(precedence = "4")]
//! BitXor,
//! /// `&`
//! #[kind(precedence = "5")]
//! BitAnd,
//! /// `in`
//! #[kind(precedence = "7")]
//! In,
//! /// `instanceof`
//! #[kind(precedence = "7")]
//! InstanceOf,
//! /// `**`
//! #[kind(precedence = "10")]
//! Exp,
//! /// `||`
//! #[kind(precedence = "1")]
//! LogicalOr,
//! /// `&&`
//! #[kind(precedence = "2")]
//! LogicalAnd,
//! }
//!
//! # fn main() {
//! # }
//! ```
//!
//!
//!
//!
//!
//!
//!
//!
#![feature(box_syntax)]
#[macro_use]
extern crate pmutil;
extern crate proc_macro;
extern crate proc_macro2;
extern crate swc_macros_common as common;
extern crate swc_macros_common;
#[macro_use]
extern crate syn;
use common::prelude::*;
use swc_macros_common::prelude::*;
mod expand;
mod input;
mod parse;
mod util;
/// # Atributes on enum
/// ## functions
/// `#[kind(functions(name = "return_type"))]`
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate enum_kind;
///
/// /// You can split attributes if you want.
/// #[derive(Kind)]
/// #[kind(functions(is_a = "bool", is_b = "bool"))]
/// #[kind(functions(is_a_or_b = "bool", num = "u8"))]
/// pub enum E {
/// //#### #[kind(is_a, is_a_or_b, num = "1")]
/// A,
/// /// You can split attributes if you want.
/// //#### #[kind(is_b)]
/// //#### #[kind(is_a_or_b)]
/// //#### #[kind(num = "2")]
/// B(u8),
/// /// Default value of bool is false if not specified and true if specified.
/// ///
/// /// Both struct like variant and tuple like variant are supported.
/// //#### #[kind(num = "3")]
/// C {},
/// }
/// # fn main() {
/// assert!(E::A.is_a() && E::A.is_a_or_b() && !E::A.is_b());
/// assert_eq!(E::A.num(), 1);
///
/// assert!(!E::B(0).is_a() && E::B(0).is_a_or_b() && E::B(0).is_b());
/// assert_eq!(E::B(0).num(), 2);
///
/// assert!(!E::C {}.is_a() && !E::C {}.is_a_or_b() && !E::C {}.is_b());
/// assert_eq!(E::C {}.num(), 3);
///
/// # }
/// ```
///
/// -----
///
/// # Real usecase
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate enum_kind;
///
/// #[derive(Kind, Debug, Clone, Eq, PartialEq, Hash)]
/// #[kind(function(precedence = "u8"))]
/// pub enum BinOpToken {
/// /// `==`
/// //#### #[kind(precedence = "6")]
/// EqEq,
/// /// `!=`
/// //#### #[kind(precedence = "6")]
/// NotEq,
/// /// `===`
/// //#### #[kind(precedence = "6")]
/// EqEqEq,
/// /// `!==`
/// //#### #[kind(precedence = "6")]
/// NotEqEq,
/// /// `<`
/// //#### #[kind(precedence = "7")]
/// Lt,
/// /// `<=`
/// //#### #[kind(precedence = "7")]
/// LtEq,
/// /// `>`
/// //#### #[kind(precedence = "7")]
/// Gt,
/// //#### #[kind(precedence = "7")]
/// /// `>=`
/// //#### #[kind(precedence = "7")]
/// GtEq,
/// /// `<<`
/// //#### #[kind(precedence = "8")]
/// LShift,
/// /// `>>`
/// //#### #[kind(precedence = "8")]
/// RShift,
/// /// `>>>`
/// //#### #[kind(precedence = "8")]
/// ZeroFillRShift,
/// /// `+`
/// //#### #[kind(precedence = "9")]
/// Plus,
/// /// `-`
/// //#### #[kind(precedence = "9")]
/// Minus,
/// /// `*`
/// //#### #[kind(precedence = "10")]
/// Mul,
/// /// `/`
/// //#### #[kind(precedence = "10")]
/// Div,
/// /// `%`
/// //#### #[kind(precedence = "10")]
/// Mod,
/// /// `|`
/// //#### #[kind(precedence = "3")]
/// BitOr,
/// /// `^`
/// //#### #[kind(precedence = "4")]
/// BitXor,
/// /// `&`
/// //#### #[kind(precedence = "5")]
/// BitAnd,
/// /// `in`
/// //#### #[kind(precedence = "7")]
/// In,
/// /// `instanceof`
/// //#### #[kind(precedence = "7")]
/// InstanceOf,
/// /// `**`
/// //#### #[kind(precedence = "10")]
/// Exp,
/// /// `||`
/// //#### #[kind(precedence = "1")]
/// LogicalOr,
/// /// `&&`
/// //#### #[kind(precedence = "2")]
/// LogicalAnd,
/// }
///
/// # fn main() {
/// # }
/// ```
///
///
///
///
///
///
///
///
#[proc_macro_derive(Kind, attributes(kind))]
pub fn derive_kind(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = syn::parse::<syn::DeriveInput>(input)
.map(From::from)
.expect("failed to parse derive input");
let item = expand::expand(input);
let tokens = item.into_tokens();
let tokens = item.into_token_stream();
// println!("Expanded:{}", tokens);

View File

@ -1,4 +1,5 @@
use common::prelude::*;
use swc_macros_common::prelude::*;
use input::*;
use std::fmt::Display;
use std::ops::AddAssign;
@ -81,7 +82,7 @@ impl Synom for FnDef {
syn!(token::Eq) >>
return_type: syn!(LitStr) >>
({
if name.as_ref() == "delegate" {
if name == "delegate" {
panic!("function name cannot be `delegate`")
}
@ -170,31 +171,23 @@ where
I: IntoIterator<Item = TokenTree>,
{
let mut tts = tts.into_iter();
let mut tt = tts.next();
let tt = tts.next();
match tt {
Some(TokenTree {
kind: TokenNode::Group(Delimiter::Parenthesis, tokens),
span,
}) => {
Some(TokenTree::Group(ref g)) if g.delimiter() == Delimiter::Parenthesis => {
if tts.next().is_none() {
return tokens;
return g.stream();
}
tt = Some(TokenTree {
kind: TokenNode::Group(Delimiter::Parenthesis, tokens),
span,
});
g.stream()
}
_ => {}
tt => panic!(
"expected tokens to be wrapped in a paren like #[kind(tokens)]\ngot {}",
match tt {
Some(ref tt) => tt as &Display,
None => &"None" as &Display,
}
),
}
panic!(
"expected tokens to be wrpped in a paren like #[kind(tokens)]\ngot {}",
match tt {
Some(ref tt) => tt as &Display,
None => &"None" as &Display,
}
)
}
let mut res = Default::default();
@ -222,16 +215,20 @@ fn parse_str_as_tokens<T>(lit: LitStr) -> T
where
T: Synom,
{
let span = lit.span;
let span = lit.span();
// WTF? Literal does not provide a way to get string...
let tt = lit.value();
// TODO:Remove '"' only for first and last.
let tts = tt.replace("\"", "")
let tts = tt
.replace("\"", "")
.parse::<TokenStream>()
.expect("failed to create TokenStream for return type")
.into_iter()
.map(|tt| TokenTree { span, ..tt })
.map(|mut tt| {
tt.set_span(span);
tt
})
.collect::<TokenStream>();
parse(tts.into()).expect("failed to parse string literal")

View File

@ -11,7 +11,7 @@ pub fn is_bool(ty: &Type) -> bool {
},
}) => {
// check for bool
if segments.len() == 1 && segments.first().unwrap().value().ident.as_ref() == "bool" {
if segments.len() == 1 && segments.first().unwrap().value().ident == "bool" {
return true;
}
}

View File

@ -7,8 +7,10 @@ pub enum Tokens {
#[kind(is_a)]
#[kind(prec = "7")]
A,
#[kind(prec = "6")] StructLike {},
#[kind(prec = "5")] TupleLike(u8),
#[kind(prec = "6")]
StructLike {},
#[kind(prec = "5")]
TupleLike(u8),
#[kind(prec = "6")]
#[cfg(feature = "not-used")]
@ -25,13 +27,16 @@ fn simple_bool() {
#[derive(Debug, Kind)]
#[kind(functions(wanted = "bool"))]
pub enum Delegate {
#[kind(wanted)] Wanted,
#[kind(delegate)] May(Del),
#[kind(wanted)]
Wanted,
#[kind(delegate)]
May(Del),
}
#[derive(Debug, Kind)]
#[kind(functions(wanted = "bool"))]
pub enum Del {
#[kind(wanted)] Yes,
#[kind(wanted)]
Yes,
No,
}

View File

@ -1,5 +1,4 @@
//! Macros used by swc project.
#![feature(macro_reexport)]
#![feature(proc_macros)]
#![allow(unused_imports)]

View File

@ -8,12 +8,12 @@ proc-macro = true
[dependencies]
swc_macros_common = { path = "../common" }
pmutil = "0.1"
proc-macro2 = "0.2"
quote = "0.4"
pmutil = "0.2"
proc-macro2 = "0.4.4"
quote = "0.6.3"
[dependencies.syn]
version = "0.12"
version = "0.14.1"
features = ["full", "parsing", "printing", "extra-traits"]

View File

@ -39,18 +39,17 @@ use swc_macros_common::prelude::*;
///
///
///```
///
/// #[macro_use]
/// extern crate string_enum;
///
/// #[derive(StringEnum)]
/// pub enum Tokens {
/// /// `a`
/// A,
/// ///`struct-like`
/// StructLike {},
/// /// `tuple-like`
/// TupleLike(u8),
/// /// `a`
/// A,
/// ///`struct-like`
/// StructLike {},
/// /// `tuple-like`
/// TupleLike(u8),
/// }
/// # fn main() {
///
@ -71,7 +70,7 @@ pub fn derive_string_enum(input: proc_macro::TokenStream) -> proc_macro::TokenSt
let input = syn::parse::<syn::DeriveInput>(input)
.map(From::from)
.expect("failed to parse derive input");
let mut tts = Tokens::new();
let mut tts = TokenStream::new();
quote_spanned!(def_site() => extern crate std;).to_tokens(&mut tts);
@ -85,7 +84,7 @@ pub fn derive_string_enum(input: proc_macro::TokenStream) -> proc_macro::TokenSt
tts.into()
}
fn derive_fmt(i: &DeriveInput, trait_path: Tokens) -> ItemImpl {
fn derive_fmt(i: &DeriveInput, trait_path: TokenStream) -> ItemImpl {
Quote::new(Span::def_site())
.quote_with(smart_quote!(
Vars {
@ -149,22 +148,22 @@ fn make_as_str(i: &DeriveInput) -> ItemImpl {
Arm {
body,
attrs: v.attrs()
attrs: v
.attrs()
.iter()
.filter(|attr| is_attr_name(attr, "cfg"))
.cloned()
.collect(),
pats: vec![
Element::End(Pat::Ref(PatRef {
and_token: def_site(),
mutability: None,
pat,
})),
].into_iter()
pats: vec![Element::End(Pat::Ref(PatRef {
and_token: def_site(),
mutability: None,
pat,
}))].into_iter()
.collect(),
guard: None,
rocket_token: def_site(),
fat_arrow_token: def_site(),
comma: Some(def_site()),
leading_vert: None,
}
})
.collect();

View File

@ -1 +1 @@
nightly-2018-02-01
nightly-2018-05-30

View File

@ -1,18 +1,20 @@
extern crate rayon;
extern crate rustc_data_structures;
extern crate slog;
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecmascript;
pub extern crate swc_macros;
use rustc_data_structures::sync::Lrc;
use slog::Logger;
use std::path::Path;
use std::rc::Rc;
use swc_common::errors::{CodeMap, Handler};
use swc_ecmascript::ast::Module;
use swc_ecmascript::parser::{FileMapInput, PResult, Parser, Session as ParseSess};
pub struct Compiler {
codemap: Rc<CodeMap>,
codemap: Lrc<CodeMap>,
threads: rayon::ThreadPool,
logger: Logger,
handler: Handler,
@ -21,7 +23,7 @@ pub struct Compiler {
impl Compiler {
pub fn new(
logger: Logger,
codemap: Rc<CodeMap>,
codemap: Lrc<CodeMap>,
handler: Handler,
threads: rayon::ThreadPool,
) -> Self {
@ -35,7 +37,8 @@ impl Compiler {
/// TODO
pub fn parse_js(&self, path: &Path) -> PResult<Module> {
let fm = self.codemap
let fm = self
.codemap
.load_file_and_lines(path)
.expect("failed to load file");

View File

@ -8,5 +8,7 @@ swc_common = { path = "../common" }
slog = "2"
slog-envlogger = "2.1"
slog-term = "2.3"
rustc-ap-syntax_pos = "150"
rustc-ap-rustc_data_structures = "150"
lazy_static = "1"
regex = "0.2.5"

View File

@ -1,15 +1,15 @@
use super::StdErr;
use rustc_data_structures::sync::Lrc;
use std::io::{self, Write};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::RwLock;
use swc_common::errors::{CodeMap, EmitterWriter, Handler, HandlerFlags};
/// Creates a new handler for testing.
pub(crate) fn new_handler(cm: Rc<CodeMap>) -> (Handler, BufferedError) {
pub(crate) fn new_handler(cm: Lrc<CodeMap>) -> (Handler, BufferedError) {
let buf: BufferedError = Default::default();
let e = EmitterWriter::new(box buf.clone(), Some(cm), false);
let e = EmitterWriter::new(box buf.clone(), Some(cm), false, true);
let handler = Handler::with_emitter(
box e,

View File

@ -6,14 +6,18 @@
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate rustc_data_structures;
extern crate syntax_pos;
#[macro_use]
extern crate slog;
extern crate slog_envlogger;
extern crate slog_term;
extern crate swc_common;
extern crate test;
pub use self::output::{NormalizedOutput, StdErr, StdOut, TestOutput};
use regex::Regex;
use rustc_data_structures::sync::Lrc;
use slog::Drain;
use slog::Logger;
use std::fmt::Debug;
@ -21,10 +25,9 @@ use std::fs::{create_dir_all, File};
use std::io;
use std::io::Write;
use std::path::Path;
use std::rc::Rc;
use std::thread;
use swc_common::{FoldWith, Folder, Span};
use swc_common::errors::{CodeMap, FilePathMapping, Handler};
use swc_common::{FoldWith, Folder, Span};
#[macro_use]
mod macros;
@ -34,11 +37,12 @@ mod paths;
pub fn run_test<F, Ret>(op: F) -> TestOutput<Ret>
where
F: FnOnce(Logger, Rc<CodeMap>, &Handler) -> Ret,
F: FnOnce(Logger, Lrc<CodeMap>, &Handler) -> Ret,
{
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
let (handler, errors) = self::errors::new_handler(cm.clone());
let result = op(logger(), cm, &handler);
let result =
syntax_pos::GLOBALS.set(&syntax_pos::Globals::new(), || op(logger(), cm, &handler));
TestOutput {
errors: errors.into(),
@ -81,9 +85,7 @@ pub fn print_left_right(left: &Debug, right: &Debug) -> String {
// Replace 'Span { lo: BytePos(0), hi: BytePos(0), ctxt: #0 }' with '_'
let s = {
lazy_static! {
static ref RE: Regex = {
Regex::new("Span \\{[\\a-zA-Z0#:\\(\\)]*\\}").unwrap()
};
static ref RE: Regex = { Regex::new("Span \\{[\\a-zA-Z0#:\\(\\)]*\\}").unwrap() };
}
&RE
@ -91,9 +93,7 @@ pub fn print_left_right(left: &Debug, right: &Debug) -> String {
// Remove 'span: _,'
let s = {
lazy_static! {
static ref RE: Regex = {
Regex::new("span: _[,]?\\s*").unwrap()
};
static ref RE: Regex = { Regex::new("span: _[,]?\\s*").unwrap() };
}
&RE
@ -105,7 +105,8 @@ pub fn print_left_right(left: &Debug, right: &Debug) -> String {
let (left, right) = (print(left), print(right));
let cur = thread::current();
let test_name = cur.name()
let test_name = cur
.name()
.expect("rustc sets test name as the name of thread");
// ./target/debug/tests/${test_name}/
@ -138,12 +139,12 @@ pub fn print_left_right(left: &Debug, right: &Debug) -> String {
#[macro_export]
macro_rules! assert_eq_ignore_span {
($l:expr, $r:expr) => {{
println!("{}", module_path!() );
println!("{}", module_path!());
let (l, r) = ($crate::drop_span($l), $crate::drop_span($r));
if l != r {
panic!("assertion failed\n{}", $crate::print_left_right(&l, &r));
}
}}
}};
}
pub fn logger() -> Logger {
@ -151,8 +152,8 @@ pub fn logger() -> Logger {
Ok(())
}
fn root() -> Logger {
use {slog_envlogger, slog_term};
use std::sync::Mutex;
use {slog_envlogger, slog_term};
let dec = slog_term::TermDecorator::new()
.force_color()

View File

@ -28,8 +28,8 @@ pub struct Diff {
/// See https://github.com/rust-lang/rust/blob/b224fc84e3/src/test/COMPILER_TESTS.md#normalization
///
/// - The `CARGO_MANIFEST_DIR` directory is replaced with `$DIR`.
/// - All backslashes (\) within same line as `$DIR` are converted to forward slashes (/) (for
/// Windows) - All CR LF newlines are converted to LF
/// - All backslashes (\) within same line as `$DIR` are converted to forward
/// slashes (/) (for Windows) - All CR LF newlines are converted to LF
///
/// - `normalize-stdout` is not implemented (yet?).
#[derive(Debug, Clone, Ord, PartialOrd, PartialEq, Eq, Default, Hash)]
@ -43,8 +43,8 @@ impl fmt::Display for NormalizedOutput {
impl NormalizedOutput {
/// If output differs, prints actual stdout/stderr to
/// `CARGO_MANIFEST_DIR/target/swc-test-results/ui/$rel_path` where `$rel_path`:
/// `path.strip_prefix(CARGO_MANIFEST_DIR)`
/// `CARGO_MANIFEST_DIR/target/swc-test-results/ui/$rel_path` where
/// `$rel_path`: `path.strip_prefix(CARGO_MANIFEST_DIR)`
pub fn compare_to_file<P>(self, path: P) -> Result<(), Diff>
where
P: AsRef<Path>,