25: Error reporting like rustc r=kdy1 a=kdy1

Error reporting is implemented using rustc
This commit is contained in:
bors[bot] 2018-01-22 13:09:21 +00:00
commit e2f75ec0a2
44 changed files with 2392 additions and 954 deletions

View File

@ -1,4 +1,11 @@
[build]
rustflags = ["--cfg", "procmacro2_semver_exempt"]
rustdocflags = ["--cfg", "procmacro2_semver_exempt"]
rustdocflags = [
"--cfg", "procmacro2_semver_exempt",
"--no-defaults",
"--passes", "collapse-docs",
"--passes", "unindent-comments",
# "--document-private-items",
"--passes", "strip-priv-imports"
]

3
.gitignore vendored
View File

@ -1,5 +1,6 @@
/target/
target/
**/*.bk
core
*.log

View File

@ -6,10 +6,19 @@ version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
swc_atoms = { path = "./atoms" }
swc_ecmascript = { path = "./ecmascript" }
libswc = { path = "./swc" }
swc_common = { path = "./common" }
swc_macros = { path = "./macros" }
rayon = "0.9"
slog = "2"
slog-envlogger = "2.1"
slog-term = "2.3"
[dependencies.clap]
version = "2.29"
default-features = false
features = [ "suggestions", "color" ]
[profile.bench]

View File

@ -4,6 +4,14 @@ version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[dependencies]
atty = "0.2"
fnv = "1"
string_cache = "0.6"
either = "1.4"
either = "1.4"
rustc-ap-rustc_errors = "16"
rustc-ap-syntax_pos = "16"
[dependencies.syntect]
version = "2"
default-features = false

View File

@ -0,0 +1,577 @@
use rustc_errors::CodeMapper;
use std::{env, fs};
use std::cell::{Ref, RefCell};
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::rc::Rc;
use syntax_pos::*;
/// An abstraction over the source files of a session.
///
/// Registers every loaded file in one global byte-position space and maps
/// `BytePos` values back to file / line / column information.
pub struct CodeMap {
    // All registered files, ordered by their starting position in the
    // global byte-position space (see `next_start_pos`).
    files: RefCell<Vec<Rc<FileMap>>>,
    // Filesystem abstraction used to read files; swappable for tests.
    file_loader: Box<FileLoader>,
    // Path-prefix substitutions applied to real file names on registration.
    path_mapping: FilePathMapping,
}
impl CodeMap {
    /// Creates a `CodeMap` that reads real files via `RealFileLoader`.
    pub fn new(path_mapping: FilePathMapping) -> Self {
        Self::with_file_loader(box RealFileLoader, path_mapping)
    }
    /// Creates a `CodeMap` with a custom `FileLoader` (useful for tests).
    pub fn with_file_loader(file_loader: Box<FileLoader>, path_mapping: FilePathMapping) -> Self {
        CodeMap {
            files: Default::default(),
            file_loader,
            path_mapping,
        }
    }
    /// The path-prefix substitutions this codemap was created with.
    pub fn path_mapping(&self) -> &FilePathMapping {
        &self.path_mapping
    }
    /// Queries the underlying file loader for the existence of `path`.
    pub fn file_exists(&self, path: &Path) -> bool {
        self.file_loader.file_exists(path)
    }
    /// Reads `path` and registers it as a new `FileMap` WITHOUT computing
    /// line-start information (see `load_file_and_lines` for that).
    pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
        let src = self.file_loader.read_file(path)?;
        let filename = path.to_owned().into();
        Ok(self.new_filemap(filename, src))
    }
    /// Reads `path` and registers it as a new `FileMap`, including
    /// line-start information.
    pub fn load_file_and_lines(&self, path: &Path) -> io::Result<Rc<FileMap>> {
        let src = self.file_loader.read_file(path)?;
        let filename = path.to_owned().into();
        Ok(self.new_filemap_and_lines(filename, src))
    }
    /// Borrows the list of registered files.
    pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
        self.files.borrow()
    }
    /// Global byte position at which the next registered file will start.
    fn next_start_pos(&self) -> usize {
        let files = self.files.borrow();
        match files.last() {
            None => 0,
            // Add one so there is some space between files. This lets us distinguish
            // positions in the codemap, even in the presence of zero-length files.
            Some(last) => last.end_pos.to_usize() + 1,
        }
    }
    /// Creates a new filemap without setting its line information. If you don't
    /// intend to set the line information yourself, you should use
    /// `new_filemap_and_lines` instead.
    pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
        let start_pos = self.next_start_pos();
        let mut files = self.files.borrow_mut();
        // The path is used to determine the directory for loading submodules and
        // include files, so it must be before remapping.
        // Note that filename may not be a valid path, eg it may be `<anon>` etc,
        // but this is okay because the directory determined by `path.pop()` will
        // be empty, so the working directory will be used.
        let unmapped_path = filename.clone();
        // Only real paths participate in prefix remapping; synthetic names
        // (`<anon>` etc.) pass through unchanged.
        let (filename, was_remapped) = match filename {
            FileName::Real(filename) => {
                let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
                (FileName::Real(filename), was_remapped)
            }
            other => (other, false),
        };
        let filemap = Rc::new(FileMap::new(
            filename,
            was_remapped,
            unmapped_path,
            src,
            Pos::from_usize(start_pos),
        ));
        files.push(filemap.clone());
        filemap
    }
    /// Creates a new filemap and sets its line information.
    pub fn new_filemap_and_lines(&self, filename: FileName, src: String) -> Rc<FileMap> {
        // TODO
        let fm = self.new_filemap(filename, src.clone());
        let mut byte_pos: u32 = fm.start_pos.0;
        for line in src.lines() {
            // register the start of this line
            fm.next_line(BytePos(byte_pos));
            // update byte_pos to include this line and the \n at the end
            byte_pos += line.len() as u32 + 1;
        }
        fm
    }
    /// Lookup source information about a BytePos
    ///
    /// On success: file, 1-based line, char-based column, and a display
    /// column adjusted by the file's `non_narrow_chars` table. If the file
    /// has no line table, returns `line: 0` with file-relative columns.
    pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
        let chpos = self.bytepos_to_file_charpos(pos);
        match self.lookup_line(pos) {
            Ok(FileMapAndLine { fm: f, line: a }) => {
                let line = a + 1; // Line numbers start at 1
                let linebpos = (*f.lines.borrow())[a];
                let linechpos = self.bytepos_to_file_charpos(linebpos);
                let col = chpos - linechpos;
                // Display column: for every "non-narrow" char between the
                // line start and `pos`, substitute its recorded width for
                // the single column the plain char count assumed.
                let col_display = {
                    let non_narrow_chars = f.non_narrow_chars.borrow();
                    let start_width_idx = non_narrow_chars
                        .binary_search_by_key(&linebpos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let end_width_idx = non_narrow_chars
                        .binary_search_by_key(&pos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let special_chars = end_width_idx - start_width_idx;
                    let non_narrow: usize = non_narrow_chars[start_width_idx..end_width_idx]
                        .into_iter()
                        .map(|x| x.width())
                        .sum();
                    col.0 - special_chars + non_narrow
                };
                // debug!(
                //     "byte pos {:?} is on the line at byte pos {:?}",
                //     pos, linebpos
                // );
                // debug!(
                //     "char pos {:?} is on the line at char pos {:?}",
                //     chpos, linechpos
                // );
                // debug!("byte is on line: {}", line);
                assert!(chpos >= linechpos);
                Loc {
                    file: f,
                    line,
                    col,
                    col_display,
                }
            }
            Err(f) => {
                // No line table: report line 0 and compute the display
                // column relative to the start of the file.
                let col_display = {
                    let non_narrow_chars = f.non_narrow_chars.borrow();
                    let end_width_idx = non_narrow_chars
                        .binary_search_by_key(&pos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let non_narrow: usize = non_narrow_chars[0..end_width_idx]
                        .into_iter()
                        .map(|x| x.width())
                        .sum();
                    chpos.0 - end_width_idx + non_narrow
                };
                Loc {
                    file: f,
                    line: 0,
                    col: chpos,
                    col_display,
                }
            }
        }
    }
    // If the relevant filemap is empty, we don't return a line number.
    pub fn lookup_line(&self, pos: BytePos) -> Result<FileMapAndLine, Rc<FileMap>> {
        let idx = self.lookup_filemap_idx(pos);
        let files = self.files.borrow();
        let f = (*files)[idx].clone();
        match f.lookup_line(pos) {
            Some(line) => Ok(FileMapAndLine { fm: f, line: line }),
            None => Err(f),
        }
    }
    /// Like `lookup_char_pos`, but repackaged with an optional file handle.
    pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
        let loc = self.lookup_char_pos(pos);
        LocWithOpt {
            filename: loc.file.name.clone(),
            line: loc.line,
            col: loc.col,
            file: Some(loc.file),
        }
    }
    /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
    /// there are gaps between lhs and rhs, the resulting union will cross these gaps.
    /// For this to work, the spans have to be:
    ///
    /// * the ctxt of both spans much match
    /// * the lhs span needs to end on the same line the rhs span begins
    /// * the lhs span must start at or before the rhs span
    pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
        // make sure we're at the same expansion id
        if sp_lhs.ctxt() != sp_rhs.ctxt() {
            return None;
        }
        let lhs_end = match self.lookup_line(sp_lhs.hi()) {
            Ok(x) => x,
            Err(_) => return None,
        };
        let rhs_begin = match self.lookup_line(sp_rhs.lo()) {
            Ok(x) => x,
            Err(_) => return None,
        };
        // if we must cross lines to merge, don't merge
        if lhs_end.line != rhs_begin.line {
            return None;
        }
        // ensure these follow the expected order and we don't overlap
        if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) {
            Some(sp_lhs.to(sp_rhs))
        } else {
            None
        }
    }
    /// Renders `sp` as `"file:line:col: line:col"` (columns are 1-based),
    /// or `"no-location"` for a dummy span when no files are loaded.
    pub fn span_to_string(&self, sp: Span) -> String {
        if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) {
            return "no-location".to_string();
        }
        let lo = self.lookup_char_pos_adj(sp.lo());
        let hi = self.lookup_char_pos_adj(sp.hi());
        return (format!(
            "{}:{}:{}: {}:{}",
            lo.filename,
            lo.line,
            lo.col.to_usize() + 1,
            hi.line,
            hi.col.to_usize() + 1
        )).to_string();
    }
    /// Name of the file containing the start of `sp`.
    pub fn span_to_filename(&self, sp: Span) -> FileName {
        self.lookup_char_pos(sp.lo()).file.name.clone()
    }
    /// Pre-remapping name of the file containing the start of `sp`.
    ///
    /// Panics if the containing `FileMap` has no unmapped path recorded.
    pub fn span_to_unmapped_path(&self, sp: Span) -> FileName {
        self.lookup_char_pos(sp.lo())
            .file
            .unmapped_path
            .clone()
            .expect("CodeMap::span_to_unmapped_path called for imported FileMap?")
    }
    /// True if `sp` starts and ends on different lines.
    pub fn is_multiline(&self, sp: Span) -> bool {
        let lo = self.lookup_char_pos(sp.lo());
        let hi = self.lookup_char_pos(sp.hi());
        lo.line != hi.line
    }
    /// Breaks `sp` down into per-line `LineInfo` entries (0-based line
    /// indices with start/end columns for each covered line).
    pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
        // debug!("span_to_lines(sp={:?})", sp);
        if sp.lo() > sp.hi() {
            return Err(SpanLinesError::IllFormedSpan(sp));
        }
        let lo = self.lookup_char_pos(sp.lo());
        // debug!("span_to_lines: lo={:?}", lo);
        let hi = self.lookup_char_pos(sp.hi());
        // debug!("span_to_lines: hi={:?}", hi);
        // A span may not straddle two files.
        if lo.file.start_pos != hi.file.start_pos {
            return Err(SpanLinesError::DistinctSources(DistinctSources {
                begin: (lo.file.name.clone(), lo.file.start_pos),
                end: (hi.file.name.clone(), hi.file.start_pos),
            }));
        }
        assert!(hi.line >= lo.line);
        let mut lines = Vec::with_capacity(hi.line - lo.line + 1);
        // The span starts partway through the first line,
        // but after that it starts from offset 0.
        let mut start_col = lo.col;
        // For every line but the last, it extends from `start_col`
        // and to the end of the line. Be careful because the line
        // numbers in Loc are 1-based, so we subtract 1 to get 0-based
        // lines.
        for line_index in lo.line - 1..hi.line - 1 {
            let line_len = lo.file
                .get_line(line_index)
                .map(|s| s.chars().count())
                .unwrap_or(0);
            lines.push(LineInfo {
                line_index,
                start_col,
                end_col: CharPos::from_usize(line_len),
            });
            start_col = CharPos::from_usize(0);
        }
        // For the last line, it extends from `start_col` to `hi.col`:
        lines.push(LineInfo {
            line_index: hi.line - 1,
            start_col,
            end_col: hi.col,
        });
        Ok(FileLines {
            file: lo.file,
            lines: lines,
        })
    }
    /// Returns the source text covered by `sp`, if the containing
    /// `FileMap` still has its source available.
    pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
        if sp.lo() > sp.hi() {
            return Err(SpanSnippetError::IllFormedSpan(sp));
        }
        let local_begin = self.lookup_byte_offset(sp.lo());
        let local_end = self.lookup_byte_offset(sp.hi());
        if local_begin.fm.start_pos != local_end.fm.start_pos {
            return Err(SpanSnippetError::DistinctSources(DistinctSources {
                begin: (local_begin.fm.name.clone(), local_begin.fm.start_pos),
                end: (local_end.fm.name.clone(), local_end.fm.start_pos),
            }));
        } else {
            let start_index = local_begin.pos.to_usize();
            let end_index = local_end.pos.to_usize();
            let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize();
            // Reject out-of-range or inverted offsets before slicing.
            if start_index > end_index || end_index > source_len {
                return Err(SpanSnippetError::MalformedForCodemap(
                    MalformedCodemapPositions {
                        name: local_begin.fm.name.clone(),
                        source_len,
                        begin_pos: local_begin.pos,
                        end_pos: local_end.pos,
                    },
                ));
            }
            if let Some(ref src) = local_begin.fm.src {
                return Ok((&src[start_index..end_index]).to_string());
            } else {
                return Err(SpanSnippetError::SourceNotAvailable {
                    filename: local_begin.fm.name.clone(),
                });
            }
        }
    }
    /// Given a `Span`, try to get a shorter span ending before the first
    /// occurrence of `c` `char`
    pub fn span_until_char(&self, sp: Span, c: char) -> Span {
        match self.span_to_snippet(sp) {
            Ok(snippet) => {
                let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
                // Only shrink when the prefix is non-empty and single-line;
                // otherwise fall back to the original span.
                if !snippet.is_empty() && !snippet.contains('\n') {
                    sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                } else {
                    sp
                }
            }
            _ => sp,
        }
    }
    /// Given a `Span`, try to get a shorter span ending just after the first
    /// occurrence of `char` `c`.
    pub fn span_through_char(&self, sp: Span, c: char) -> Span {
        if let Ok(snippet) = self.span_to_snippet(sp) {
            if let Some(offset) = snippet.find(c) {
                return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32));
            }
        }
        sp
    }
    /// Shrinks a definition's span to everything before its body's `{`.
    pub fn def_span(&self, sp: Span) -> Span {
        self.span_until_char(sp, '{')
    }
    /// Finds a registered `FileMap` by name (linear scan).
    pub fn get_filemap(&self, filename: &FileName) -> Option<Rc<FileMap>> {
        for fm in self.files.borrow().iter() {
            if *filename == fm.name {
                return Some(fm.clone());
            }
        }
        None
    }
    /// For a global BytePos compute the local offset within the containing
    /// FileMap
    pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
        let idx = self.lookup_filemap_idx(bpos);
        let fm = (*self.files.borrow())[idx].clone();
        let offset = bpos - fm.start_pos;
        FileMapAndBytePos {
            fm: fm,
            pos: offset,
        }
    }
    /// Converts an absolute BytePos to a CharPos relative to the filemap.
    pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
        let idx = self.lookup_filemap_idx(bpos);
        let files = self.files.borrow();
        let map = &(*files)[idx];
        // The number of extra bytes due to multibyte chars in the FileMap
        let mut total_extra_bytes = 0;
        // `multibyte_chars` is ordered by position, so we can stop at the
        // first entry past `bpos`.
        for mbc in map.multibyte_chars.borrow().iter() {
            // debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
            if mbc.pos < bpos {
                // every character is at least one byte, so we only
                // count the actual extra bytes.
                total_extra_bytes += mbc.bytes - 1;
                // We should never see a byte position in the middle of a
                // character
                assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
            } else {
                break;
            }
        }
        assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
        CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
    }
    // Return the index of the filemap (in self.files) which contains pos.
    pub fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
        let files = self.files.borrow();
        let files = &*files;
        let count = files.len();
        // Binary search for the filemap.
        let mut a = 0;
        let mut b = count;
        while b - a > 1 {
            let m = (a + b) / 2;
            if files[m].start_pos > pos {
                b = m;
            } else {
                a = m;
            }
        }
        assert!(
            a < count,
            "position {} does not resolve to a source location",
            pos.to_usize()
        );
        return a;
    }
    /// Re-reads the file's source from disk if the `FileMap` lost it;
    /// returns whether source is now available.
    fn ensure_filemap_source_present(&self, file_map: Rc<FileMap>) -> bool {
        file_map.add_external_src(|| match file_map.name {
            FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
            _ => None,
        })
    }
    /// Total number of lines across all registered files.
    pub fn count_lines(&self) -> usize {
        self.files().iter().fold(0, |a, f| a + f.count_lines())
    }
}
/// An ordered list of `from -> to` path-prefix substitution rules.
#[derive(Clone)]
pub struct FilePathMapping {
    mapping: Vec<(PathBuf, PathBuf)>,
}
impl FilePathMapping {
    /// A mapping with no substitution rules; `map_prefix` is the identity.
    pub fn empty() -> FilePathMapping {
        FilePathMapping {
            mapping: Vec::new(),
        }
    }
    /// Builds a mapping from explicit `(from, to)` prefix pairs.
    pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
        FilePathMapping { mapping }
    }
    /// Applies any path prefix substitution as defined by the mapping.
    /// The return value is the remapped path and a boolean indicating whether
    /// the path was affected by the mapping.
    pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
        // Entries specified later on the command line take precedence, so
        // scan the rules back-to-front and stop at the first match.
        let remapped = self.mapping.iter().rev().find_map(|&(ref from, ref to)| {
            path.strip_prefix(from).ok().map(|rest| to.join(rest))
        });
        match remapped {
            Some(new_path) => (new_path, true),
            None => (path, false),
        }
    }
}
/// An abstraction over the fs operations used by the Parser.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;
    /// Return an absolute path to a file, if possible.
    fn abs_path(&self, path: &Path) -> Option<PathBuf>;
    /// Read the contents of an UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}
/// The default `FileLoader`: talks to the real filesystem via `std::fs`.
pub struct RealFileLoader;
impl FileLoader for RealFileLoader {
    fn file_exists(&self, path: &Path) -> bool {
        // A path "exists" iff we can stat it.
        fs::metadata(path).is_ok()
    }
    fn abs_path(&self, path: &Path) -> Option<PathBuf> {
        if path.is_absolute() {
            return Some(path.to_path_buf());
        }
        // Resolve relative paths against the current working directory;
        // `None` if the cwd itself cannot be determined.
        let cwd = env::current_dir().ok()?;
        Some(cwd.join(path))
    }
    fn read_file(&self, path: &Path) -> io::Result<String> {
        let mut contents = String::new();
        let mut file = fs::File::open(path)?;
        file.read_to_string(&mut contents)?;
        Ok(contents)
    }
}
/// Implements the `rustc_errors::CodeMapper` interface so this `CodeMap`
/// can be handed to rustc's error-emitting machinery. All methods simply
/// delegate to the inherent methods of the same name, except where noted.
impl CodeMapper for CodeMap {
    fn lookup_char_pos(&self, pos: BytePos) -> Loc {
        self.lookup_char_pos(pos)
    }
    fn span_to_lines(&self, sp: Span) -> FileLinesResult {
        self.span_to_lines(sp)
    }
    fn span_to_string(&self, sp: Span) -> String {
        self.span_to_string(sp)
    }
    fn span_to_filename(&self, sp: Span) -> FileName {
        self.span_to_filename(sp)
    }
    fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
        self.merge_spans(sp_lhs, sp_rhs)
    }
    fn call_span_if_macro(&self, sp: Span) -> Span {
        // If the span points into a macro expansion, report at the
        // outermost use site instead of inside the expansion.
        if self.span_to_filename(sp.clone()).is_macros() {
            let v = sp.macro_backtrace();
            if let Some(use_site) = v.last() {
                return use_site.call_site;
            }
        }
        sp
    }
    fn ensure_filemap_source_present(&self, file_map: Rc<FileMap>) -> bool {
        self.ensure_filemap_source_present(file_map)
    }
    /// No op.
    fn doctest_offset_line(&self, orig: usize) -> usize {
        orig
    }
}

View File

@ -0,0 +1,173 @@
use super::Handler;
use {MultiSpan, Span};
use rustc_errors::{Diagnostic as RustcDiagnostic, DiagnosticBuilder as Builder, Level};
pub use rustc_errors::{DiagnosticId, DiagnosticStyledString};
use std::fmt;
/// A diagnostic under construction.
///
/// By-value builder wrapper around rustc's `DiagnosticBuilder`. Marked
/// `#[must_use]` because a built diagnostic must be emitted or cancelled
/// (see `cancel`), not silently dropped.
#[must_use]
pub struct Diagnostic<'a> {
    // The underlying rustc diagnostic builder.
    db: Box<Builder<'a>>,
}
impl<'a> Diagnostic<'a> {
    /// Creates a new diagnostic at `level`, without an error code.
    pub fn new(handler: &'a Handler, level: Level, msg: &str) -> Self {
        Self::new_with_code(handler, level, None, msg)
    }
    /// Creates a new diagnostic at `level`, optionally tagged with `code`.
    pub fn new_with_code(
        handler: &'a Handler,
        level: Level,
        code: Option<DiagnosticId>,
        msg: &str,
    ) -> Self {
        Diagnostic {
            db: box Builder::new_diagnostic(
                &handler.inner,
                RustcDiagnostic::new_with_code(level, code, msg),
            ),
        }
    }
    /// Emits the diagnostic, consuming it.
    pub fn emit(mut self) {
        self.db.emit()
    }
    /// Cancel the diagnostic (a structured diagnostic must either be emitted or
    /// canceled or it will panic when dropped).
    pub fn cancel(mut self) -> Self {
        self.db.cancel();
        self
    }
    /// Whether the diagnostic has been cancelled.
    pub fn cancelled(&self) -> bool {
        self.db.cancelled()
    }
    /// Add a span/label to be included in the resulting snippet.
    /// This is pushed onto the `MultiSpan` that was created when the
    /// diagnostic was first built. If you don't call this function at
    /// all, and you just supplied a `Span` to create the diagnostic,
    /// then the snippet will just include that `Span`, which is
    /// called the primary span.
    pub fn span_label<T: Into<String>>(mut self, span: Span, label: T) -> Self {
        self.db.span_label(span, label.into());
        self
    }
    /// Adds an "expected X, found Y" note. Delegates to the builder.
    pub fn note_expected_found(
        mut self,
        label: &fmt::Display,
        expected: DiagnosticStyledString,
        found: DiagnosticStyledString,
    ) -> Self {
        self.db.note_expected_found(label, expected, found);
        self
    }
    /// Like `note_expected_found`, with extra trailing detail on each side.
    pub fn note_expected_found_extra(
        mut self,
        label: &fmt::Display,
        expected: DiagnosticStyledString,
        found: DiagnosticStyledString,
        expected_extra: &fmt::Display,
        found_extra: &fmt::Display,
    ) -> Self {
        self.db
            .note_expected_found_extra(label, expected, found, expected_extra, found_extra);
        self
    }
    /// Attaches a note (no span).
    pub fn note(mut self, msg: &str) -> Self {
        self.db.note(msg);
        self
    }
    // pub fn highlighted_note(mut self, msg: Vec<(String, Style)>) -> Self {
    //     self.db.highlighted_note(msg);
    //     self
    // }
    /// Attaches a note pointing at `sp`.
    pub fn span_note<S: Into<MultiSpan>>(mut self, sp: S, msg: &str) -> Self {
        self.db.span_note(sp, msg);
        self
    }
    /// Attaches a warning (no span).
    pub fn warn(mut self, msg: &str) -> Self {
        self.db.warn(msg);
        self
    }
    /// Attaches a warning pointing at `sp`.
    pub fn span_warn<S: Into<MultiSpan>>(mut self, sp: S, msg: &str) -> Self {
        self.db.span_warn(sp, msg);
        self
    }
    /// Attaches a help message (no span).
    pub fn help(mut self, msg: &str) -> Self {
        self.db.help(msg);
        self
    }
    /// Attaches a help message pointing at `sp`.
    pub fn span_help<S: Into<MultiSpan>>(mut self, sp: S, msg: &str) -> Self {
        self.db.span_help(sp, msg);
        self
    }
    /// Prints out a message with a suggested edit of the code. If the suggestion is presented
    /// inline it will only show the text message and not the text.
    ///
    /// See `CodeSuggestion` for more information.
    pub fn span_suggestion_short(mut self, sp: Span, msg: &str, suggestion: String) -> Self {
        self.db.span_suggestion_short(sp, msg, suggestion);
        self
    }
    /// Prints out a message with a suggested edit of the code.
    ///
    /// In case of short messages and a simple suggestion,
    /// rustc displays it as a label like
    ///
    /// "try adding parentheses: `(tup.0).1`"
    ///
    /// The message
    ///
    /// * should not end in any punctuation (a `:` is added automatically)
    /// * should not be a question
    /// * should not contain any parts like "the following", "as shown"
    /// * may look like "to do xyz, use" or "to do xyz, use abc"
    /// * may contain a name of a function, variable or type, but not whole expressions
    ///
    /// See `CodeSuggestion` for more information.
    pub fn span_suggestion(mut self, sp: Span, msg: &str, suggestion: String) -> Self {
        self.db.span_suggestion(sp, msg, suggestion);
        self
    }
    /// Prints out a message with multiple suggested edits of the code.
    pub fn span_suggestions(mut self, sp: Span, msg: &str, suggestions: Vec<String>) -> Self {
        self.db.span_suggestions(sp, msg, suggestions);
        self
    }
    /// Sets the primary span of the diagnostic.
    pub fn span<S: Into<MultiSpan>>(mut self, sp: S) -> Self {
        self.db.set_span(sp);
        self
    }
    /// Sets the diagnostic code (e.g. an error or lint id).
    pub fn code(mut self, s: DiagnosticId) -> Self {
        self.db.code(s);
        self
    }
    // /// Used by a lint. Copies over all details *but* the "main
    // /// message".
    // pub fn copy_details_not_message(mut self, from: &Diagnostic) {
    //     self.span = from.span.clone();
    //     self.code = from.code.clone();
    //     self.children.extend(from.children.iter().cloned())
    // }
}
/// Wraps a raw rustc `DiagnosticBuilder` into this crate's `Diagnostic`.
impl<'a> From<Builder<'a>> for Diagnostic<'a> {
    fn from(db: Builder<'a>) -> Self {
        Diagnostic { db: box db }
    }
}

View File

@ -0,0 +1,66 @@
use super::Diagnostic;
use Span;
use rustc_errors::{CodeMapper, ColorConfig, Diagnostic as RustcDiagnostic,
Handler as RustcHandler, HandlerFlags, Level};
use std::rc::Rc;
/// A handler deals with errors.
///
/// Thin wrapper around rustc's `Handler`; diagnostics are created through
/// the methods below and report through the wrapped handler.
pub struct Handler {
    // `pub(super)` so `Diagnostic` in the sibling module can build on it.
    pub(super) inner: RustcHandler,
}
impl Handler {
    /// Creates a handler that reports to the terminal.
    ///
    /// `cm` is the code map used to render source snippets for spans.
    pub fn with_tty_emitter(
        color_config: ColorConfig,
        can_emit_warnings: bool,
        treat_err_as_bug: bool,
        cm: Option<Rc<CodeMapper>>,
    ) -> Self {
        RustcHandler::with_tty_emitter(color_config, can_emit_warnings, treat_err_as_bug, cm).into()
    }
    /// Like `with_tty_emitter`, but configured via `HandlerFlags`.
    pub fn with_tty_emitter_and_flags(
        color_config: ColorConfig,
        cm: Option<Rc<CodeMapper>>,
        flags: HandlerFlags,
    ) -> Self {
        RustcHandler::with_tty_emitter_and_flags(color_config, cm, flags).into()
    }
    /// Starts a note-level diagnostic pointing at `sp`.
    pub fn note<'a, 'b>(&'a self, sp: Span, msg: &'b str) -> Diagnostic<'a> {
        Diagnostic::new(self, Level::Note, msg).span(sp)
    }
    /// Starts a warning-level diagnostic (no span attached yet).
    pub fn warn<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
        self.inner.struct_warn(msg).into()
    }
    /// Starts an error-level diagnostic (no span attached yet).
    pub fn error<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
        Diagnostic::new(self, Level::Error, msg)
    }
    /// Starts a fatal-level diagnostic (no span attached yet).
    pub fn fatal<'a, 'b>(&'a self, msg: &'b str) -> Diagnostic<'a> {
        Diagnostic::new(self, Level::Fatal, msg)
    }
    /// Whether any error has been reported through this handler.
    pub fn has_errors(&self) -> bool {
        self.inner.has_errors()
    }
    /// Delegates to rustc's `abort_if_errors` (stops if errors were emitted).
    pub fn abort_if_errors(&self) {
        self.inner.abort_if_errors()
    }
    /// Runs `f`, returning its result together with every diagnostic
    /// emitted while it ran.
    pub fn track_diagnostics<F, R>(&self, f: F) -> (R, Vec<RustcDiagnostic>)
    where
        F: FnOnce() -> R,
    {
        self.inner.track_diagnostics(f)
    }
}
/// Wraps a raw rustc `Handler` into this crate's `Handler`.
impl From<RustcHandler> for Handler {
    fn from(inner: RustcHandler) -> Self {
        Handler { inner }
    }
}

20
common/src/errors/mod.rs Normal file
View File

@ -0,0 +1,20 @@
//! Error reporting for the swc project.
//!
//! -----
//!
//! This module is almost copied from [rustc_errors][].
//!
//!
//![rustc_errors]:http://manishearth.github.io/rust-internals-docs/rustc_errors/
pub use self::codemap::{CodeMap, FileLoader, FilePathMapping, RealFileLoader};
pub use self::diagnostic::*;
pub use self::handler::*;
pub use rustc_errors::{ColorConfig, Level};
pub use rustc_errors::Level::*;
mod codemap;
mod diagnostic;
mod handler;
#[cfg(test)]
mod tests;

View File

@ -0,0 +1,66 @@
use super::*;
use {BytePos, Span};
use std::io;
use std::path::{Path, PathBuf};
use std::rc::Rc;
/// A fake `FileLoader` serving a fixed in-memory JS snippet for every path,
/// so the test does not depend on the real filesystem.
struct MyFileLoader;
impl FileLoader for MyFileLoader {
    /// Query the existence of a file. Always reports `true`.
    fn file_exists(&self, path: &Path) -> bool {
        println!("File exists?: {}", path.display());
        true
    }
    /// Return an absolute path to a file, if possible. Always `/tmp.js`.
    fn abs_path(&self, path: &Path) -> Option<PathBuf> {
        Some("/tmp.js".into())
    }
    /// Read the contents of an UTF-8 file into memory. Returns the fixed
    /// snippet regardless of `path`.
    fn read_file(&self, path: &Path) -> io::Result<String> {
        Ok("
function foo() {
with (window) {
}
}"
            .into())
    }
}
/// Smoke test for the error-reporting pipeline: loads the fake file, then
/// emits one error diagnostic spanning almost the whole file and one lint
/// warning with a narrower span, exercising the tty emitter end to end.
#[test]
fn test() {
    let cm = CodeMap::with_file_loader(box MyFileLoader, FilePathMapping::empty());
    let file_map = cm.load_file_and_lines("tmp.js".as_ref())
        .expect("failed to load tmp.js");
    println!(
        "File (start={},end={})",
        file_map.start_pos.0, file_map.end_pos.0
    );
    // A span covering the file except its first and last byte.
    let start_pos = file_map.start_pos + BytePos(1);
    let end_pos = file_map.end_pos - BytePos(1);
    let full = Span::new(start_pos, end_pos, Default::default());
    let handler = Handler::with_tty_emitter(ColorConfig::Always, false, false, Some(Rc::new(cm)));
    Diagnostic::new_with_code(
        &handler,
        Error,
        Some(DiagnosticId::Error("ABCDE".into())),
        "Test span_label",
    ).span(full)
        .emit();
    // NOTE(review): offsets 21..25 appear hand-picked to land on the `with`
    // statement in the snippet — confirm if the snippet text changes.
    Diagnostic::new_with_code(
        &handler,
        Warning,
        Some(DiagnosticId::Lint("WITH_STMT".into())),
        "Lint: With statement",
    ).span(Span::new(
        start_pos + BytePos(21),
        start_pos + BytePos(25),
        Default::default(),
    ))
        .emit();
}

View File

@ -1,14 +1,21 @@
#![feature(box_syntax)]
#![feature(i128_type)]
#![feature(range_contains)]
#![feature(try_trait)]
#![feature(never_type)]
#![feature(specialization)]
extern crate atty;
extern crate either;
extern crate fnv;
extern crate rustc_errors;
extern crate string_cache;
extern crate syntax_pos;
pub use self::ast_node::AstNode;
pub use self::fold::{FoldWith, Folder};
pub use self::span::{BytePos, Span, Spanned};
pub use self::pos::*;
mod ast_node;
pub mod errors;
mod fold;
mod span;
pub mod pos;

15
common/src/pos/mod.rs Normal file
View File

@ -0,0 +1,15 @@
//! This module is almost copied from [rustc_errors][].
//!
//! Modified a bit because, unlike rustc, we
//!
//! - work with lot of files (node_modules).
//! - need source information in every run (I think?)
//!
//!
//!-----
//!
//![rustc_errors]:TODO
pub use self::span::*;
pub use syntax_pos::{FileMap, FileName, MultiSpan};
mod span;

123
common/src/pos/span.rs Normal file
View File

@ -0,0 +1,123 @@
// use super::FileMap;
// use errors::Msg;
use fold::FoldWith;
// use std::fmt::{self, Debug, Display, Formatter};
// use std::ops::{Add, Sub};
// use std::rc::Rc;
pub use syntax_pos::{BytePos, Span, SpanData, DUMMY_SP};
// /// A new-type struct for position of specific byte.
// ///
// /// See [Span](./struct.Span.html).
// #[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
// pub struct BytePos(pub u32);
// impl Display for BytePos {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// Display::fmt(&self.0, f)
// }
// }
// impl BytePos {
// pub(crate) fn to_usize(self) -> usize {
// self.0 as usize
// }
// pub(crate) fn from_usize(u: usize) -> Self {
// BytePos(u as _)
// }
// }
// impl Debug for BytePos {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// Debug::fmt(&self.0, f)
// }
// }
// impl Add for BytePos {
// type Output = BytePos;
// fn add(self, rhs: BytePos) -> BytePos {
// BytePos(self.0 + rhs.0)
// }
// }
// impl Sub for BytePos {
// type Output = BytePos;
// fn sub(self, rhs: BytePos) -> BytePos {
// BytePos(self.0 - rhs.0)
// }
// }
// /// Byte range of a token or node.
// #[derive(Clone, Copy, PartialEq, Eq, Hash)]
// pub struct Span {
// /// Inclusive
// pub start: BytePos,
// /// Inclusive
// pub end: BytePos,
// }
// impl Debug for Span {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// if self.start == BytePos(0) && self.end == BytePos(0) {
// write!(f, "_")
// } else {
// write!(f, "{}..{}", self.start, self.end.0 + 1)
// }
// }
// }
// impl Span {
// pub fn new(start: BytePos, end: BytePos) -> Self {
// Span { start, end }
// }
// /// temporary method
// pub(crate) fn ctxt(self) -> usize {
// 0
// }
// /// temporary method
// pub(crate) fn lo(self) -> BytePos {
// self.start
// }
// /// temporary method
// pub(crate) fn hi(self) -> BytePos {
// self.end
// }
// /// temporary method
// pub(crate) fn with_hi(self, hi: BytePos) -> Span {
// Span {
// start: self.start,
// end: hi,
// }
// }
// /// Dummy span. This is the same as `Span::default()`.
// pub const DUMMY: Span = Span {
// start: BytePos(0),
// end: BytePos(0),
// };
// }
// impl Default for Span {
// #[inline]
// fn default() -> Self {
// Span::DUMMY
// }
// }
/// Conversion from an unspanned node plus a `Span` into the spanned form.
pub trait Spanned<T>: Sized {
    /// Creates `Self` from `node` and `span`.
    fn from_unspanned(node: T, span: Span) -> Self;
}
impl<S, T> Spanned<T> for Box<S>
where
    S: Spanned<T>,
{
    // Boxing preserves `Spanned`: build the inner value, then box it.
    fn from_unspanned(node: T, span: Span) -> Self {
        box S::from_unspanned(node, span)
    }
}
impl<F> FoldWith<F> for Span {
    /// No-op: a `Span` is a leaf with no child nodes to fold.
    fn fold_children(self, _: &mut F) -> Span {
        self
    }
}

View File

@ -1,72 +0,0 @@
use fold::FoldWith;
use std::fmt::{self, Debug, Display, Formatter};
/// A new-type struct for position of specific byte.
///
/// See [Span](./struct.Span.html).
#[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct BytePos(pub u32);
impl Display for BytePos {
    // Render as the bare number.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl Debug for BytePos {
    // Debug output matches the inner integer's debug form.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
/// Byte range of a token or node.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Span {
    /// Inclusive
    pub start: BytePos,
    /// Exclusive
    pub end: BytePos,
}
impl Debug for Span {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        // The dummy span prints as a placeholder instead of `0..0`.
        match (self.start, self.end) {
            (BytePos(0), BytePos(0)) => write!(f, "_"),
            _ => write!(f, "{}..{}", self.start, self.end),
        }
    }
}
impl Span {
    /// Dummy span. This is the same as `Span::default()`.
    pub const DUMMY: Span = Span {
        start: BytePos(0),
        end: BytePos(0),
    };
}
impl Default for Span {
    #[inline]
    fn default() -> Self {
        Span::DUMMY
    }
}
/// Conversion from an unspanned node plus a `Span` into the spanned form.
pub trait Spanned<T>: Sized {
    /// Creates `Self` from `node` and `span`.
    fn from_unspanned(node: T, span: Span) -> Self;
}
impl<S, T> Spanned<T> for Box<S>
where
    S: Spanned<T>,
{
    // Boxing preserves `Spanned`: build the inner value, then box it.
    fn from_unspanned(node: T, span: Span) -> Self {
        box S::from_unspanned(node, span)
    }
}
impl<F> FoldWith<F> for Span {
    /// No-op: a `Span` is a leaf with no child nodes to fold.
    fn fold_children(self, _: &mut F) -> Span {
        self
    }
}

View File

@ -10,7 +10,6 @@ swc_common = { path = "../../common" }
swc_ecma_ast = { path = "../ast" }
parser_macros = { path = "../parser_macros" }
unicode-xid = "0.1"
failure = "0.1"
slog = "2.1"
either = { version = "1.4" }

View File

@ -1,15 +1,75 @@
use self::SyntaxError::*;
use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter};
use swc_atoms::JsWord;
use swc_common::Span;
use swc_common::errors::{Diagnostic, Handler};
use token::Token;
/// An "unexpected end of input" marker.
///
/// Carries the span of the last token and the handler so it can be turned
/// directly into a `Diagnostic` via the `From` impl.
#[derive(Copy, Clone)]
pub(crate) struct Eof<'a> {
    // Span of the last token before input ended.
    pub last: Span,
    pub handler: &'a Handler,
}
/// Converts an `Eof` into an error diagnostic located at the last token.
impl<'a> From<Eof<'a>> for Diagnostic<'a> {
    fn from(Eof { handler, last }: Eof<'a>) -> Self {
        handler.error("expected some tokens after here").span(last)
    }
}
impl<'a> Debug for Eof<'a> {
    // Debug-print as the literal token name instead of dumping fields.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Debug::fmt("<eof>", f)
    }
}
/// A syntax error bundled with its location and the handler that will be
/// used to report it (see the `From<Error>` conversion into `Diagnostic`).
pub(crate) struct Error<'a> {
    pub handler: &'a Handler,
    // Where in the source the error occurred.
    pub span: Span,
    // What kind of error it is.
    pub error: SyntaxError,
}
#[derive(Debug)]
pub enum SyntaxError {
pub(crate) enum SyntaxError {
LegacyDecimal,
LegacyOctal,
InvalidIdentChar,
// #[fail(display = "unterminated string constant: {:?}", start)]
UnterminatedStrLit,
// #[fail(display = "expected unicode escape sequence: {:?}", pos)]
ExpectedUnicodeEscape,
// #[fail(display = "unexpected escape sequence in reserved word: {:?}", word)]
EscapeInReservedWord {
word: JsWord,
},
// #[fail(display = "unterminated regexp (regexp started at {:?})", start)]
UnterminatedRegxp,
// #[fail(display = "identifier directly after number at {:?}", pos)]
IdentAfterNum,
// #[fail(display = "Unexpected character '{}' at {:?}", c, pos)]
UnexpectedChar {
c: char,
},
// #[fail(display = "Invalid string escape at {:?}", start)]
InvalidStrEscape,
// #[fail(display = "Invalid unciode escape at {:?}", pos)]
InvalidUnicodeEscape,
// #[fail(display = "Invalid unciode code point at {:?}", pos)]
InvalidCodePoint,
/// "implements", "interface", "let", "package",\
/// "private", "protected", "public", "static", or "yield"
InvalidIdentInStrict,
/// 'eval' and 'arguments' are invalid identfier in strict mode.
EvalAndArgumentsInStrict,
UnaryInExp,
UnaryInExp {
left: String,
left_span: Span,
},
LineBreakInThrow,
Expected(&'static Token),
@ -31,8 +91,62 @@ pub enum SyntaxError {
DuplicateLabel(JsWord),
AsyncGenerator,
NonTopLevelImportExport,
/// Destructuring bindings require initializers.
PatVarWithoutInit {
span: Span,
},
PatVarWithoutInit,
}
// Converts an `Error` into a user-facing `Diagnostic` by mapping each
// `SyntaxError` variant to its message text, then attaching the span.
// NOTE(review): variants are matched unqualified, which assumes a glob
// import of `SyntaxError`'s variants earlier in the file — TODO confirm.
impl<'a> From<Error<'a>> for Diagnostic<'a> {
fn from(e: Error<'a>) -> Self {
// `Cow` avoids allocating for the static message strings; only the
// `format!` arms allocate.
let msg: Cow<'static, _> = match e.error {
LegacyDecimal => "Legacy decimal literal is not permitted in strict mode".into(),
LegacyOctal => "Legacy octal literal is not permitted in strict mode".into(),
InvalidIdentChar => "Invalid character in identifier".into(),
UnterminatedStrLit => "Unterminated string constant".into(),
ExpectedUnicodeEscape => "Expected unicode escape".into(),
EscapeInReservedWord { word } => {
format!("unexpected escape sequence in reserved word: {}", word).into()
}
UnterminatedRegxp => "Unterminated regexp literal".into(),
IdentAfterNum => "Identifier cannot follow number".into(),
UnexpectedChar { c } => format!("Unexpected character '{}'", c).into(),
InvalidStrEscape => "Invalid string escape".into(),
// NOTE(review): "unciode" is a typo for "unicode" in the next two
// user-facing messages — left byte-identical here; fix upstream.
InvalidUnicodeEscape => "Invalid unciode escape".into(),
InvalidCodePoint => "Invalid unciode code point".into(),
InvalidIdentInStrict => "'implements', 'interface', 'let', 'package', 'private', \
'protected', 'public', 'static', or 'yield' cannot be used \
as an identifier in strict mode"
.into(),
// NOTE(review): "string mode" should read "strict mode" — message
// left byte-identical; fix upstream.
EvalAndArgumentsInStrict => {
r#"'eval' and 'arguments' cannot be used as a binding identifier in string mode"#.into()
}
UnaryInExp { .. } => "** cannot be applied to unary expression".into(),
LineBreakInThrow => "LineBreak cannot follow 'throw'".into(),
Expected(token) => format!("Expected {:?}", token).into(),
// NOTE(review): this string literal spans lines oddly and embeds a
// literal "// " before "Promise.all()"; presumably a formatting
// accident — verify the rendered message upstream.
AwaitStar => "await* has been removed from the async functions proposal. Use
\
// Promise.all() instead."
.into(),
ReservedWordInObjShorthandOrPat => {
"Cannot use a reserved word as a shorthand property".into()
}
MultipleDefault => "A switch block cannot have multiple defaults".into(),
CommaAfterRestElement => "Trailing comma isn't permitted after a rest element".into(),
NonLastRestParam => "Rest element must be final element".into(),
SpreadInParenExpr => "Parenthesized expression cannot contain spread operator".into(),
// NOTE(review): "Parenthized exprssion" has two typos — left
// byte-identical; fix upstream.
EmptyParenExpr => "Parenthized exprssion cannot be empty".into(),
ExpectedIdent => "Expected ident".into(),
ExpctedSemi => "Expected ';' or line break".into(),
DuplicateLabel(label) => format!("Label {} is already declared", label).into(),
AsyncGenerator => "An async function cannot be generator".into(),
NonTopLevelImportExport => "'import', and 'export' are not permitted here".into(),
PatVarWithoutInit => "Destructuring bindings require initializers".into(),
};
e.handler.error(&msg).span(e.span)
}
}

View File

@ -1,4 +1,3 @@
use std::fmt::Debug;
use std::str;
use swc_common::BytePos;
@ -9,7 +8,7 @@ pub(super) struct LexerInput<I: Input> {
input: I,
}
impl<I: Input> LexerInput<I> {
impl<'a, I: Input> LexerInput<I> {
pub const fn new(input: I) -> Self {
LexerInput {
input,
@ -61,8 +60,6 @@ impl<I: Input> LexerInput<I> {
pub struct CharIndices<'a>(pub str::CharIndices<'a>);
impl<'a> Input for CharIndices<'a> {
type Error = ();
fn peek(&mut self) -> Option<(BytePos, char)> {
self.clone().nth(0)
}
@ -87,7 +84,6 @@ impl<'a> Iterator for CharIndices<'a> {
}
pub trait Input: Iterator<Item = (BytePos, char)> {
type Error: Debug;
fn peek(&mut self) -> Option<(BytePos, char)>;
fn peek_ahead(&mut self) -> Option<(BytePos, char)>;
@ -103,8 +99,6 @@ impl<'a, I> Input for &'a mut I
where
I: Input,
{
type Error = I::Error;
fn peek(&mut self) -> Option<(BytePos, char)> {
<I as Input>::peek(*self)
}

View File

@ -8,11 +8,13 @@ pub use self::input::Input;
use self::input::LexerInput;
use self::state::State;
use self::util::*;
use Session;
use error::SyntaxError;
use parser_macros::parser;
use slog::Logger;
use std::char;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use swc_common::errors::Diagnostic;
use token::*;
#[macro_use]
@ -24,100 +26,24 @@ mod state;
mod tests;
pub mod util;
#[derive(Fail, Debug, PartialEq, Eq, Hash)]
pub enum Error<InputError> {
#[fail(display = "input error: {}", err)] Input {
err: InputError,
},
#[fail(display = "unterminated string constant: {}", start)]
UnterminatedStrLit {
start: BytePos,
},
#[fail(display = "expected unicode escape sequence: {}", pos)]
ExpectedUnicodeEscape {
pos: BytePos,
},
#[fail(display = "unexpected escape sequence in reserved word: {:?}", word)]
EscapeInReservedWord {
word: Word,
},
#[fail(display = "unterminated regexp (regexp started at {})", start)]
UnterminatedRegxp {
start: BytePos,
},
#[fail(display = "identifier directly after number at {}", pos)]
IdentAfterNum {
pos: BytePos,
},
#[fail(display = "Decimals with leading zeros (at {}) are not allowed in strict mode", start)]
DecimalStartsWithZero {
start: BytePos,
},
#[fail(display = "Octals with leading zeros (at {}) are not allowed in strict mode", start)]
ImplicitOctalOnStrict {
start: BytePos,
},
#[fail(display = "Unexpected character '{}' at {}", c, pos)]
UnexpectedChar {
pos: BytePos,
c: char,
},
#[fail(display = "Invalid string escape at {}", start)]
InvalidStrEscape {
start: BytePos,
},
pub type LexResult<'a, T> = Result<T, Diagnostic<'a>>;
#[fail(display = "Invalid unciode escape at {:?}", pos)]
InvalidUnicodeEscape {
pos: Span,
},
#[fail(display = "Invalid unciode code point at {:?}", pos)]
InvalidCodePoint {
pos: Span,
},
#[fail(display = "Invalid identifier character at {:?}", pos)]
InvalidIdentChar {
pos: Span,
},
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Options {
/// Support function bind expression.
pub fn_bind: bool,
pub strict: bool,
/// Support numeric separator.
pub num_sep: bool,
pub module: bool,
}
pub struct Lexer<I: Input> {
logger: Logger,
pub struct Lexer<'a, I: Input> {
session: Session<'a>,
input: LexerInput<I>,
opts: Options,
state: State,
}
impl<I: Input> Lexer<I> {
pub fn new(logger: Logger, input: I) -> Self {
Self::new_with(logger, Options::default(), input)
}
pub fn new_with(logger: Logger, opts: Options, input: I) -> Self {
impl<'a, I: Input> Lexer<'a, I> {
pub fn new(session: Session<'a>, input: I) -> Self {
Lexer {
logger,
opts,
state: State::new(),
session,
input: LexerInput::new(input),
state: State::new(),
}
}
fn read_token(&mut self) -> Result<Option<Token>, Error<I::Error>> {
fn read_token(&mut self) -> LexResult<'a, Option<Token>> {
let c = match self.input.current() {
Some(c) => c,
None => return Ok(None),
@ -181,7 +107,7 @@ impl<I: Input> Lexer<I> {
':' => {
self.input.bump();
if self.opts.fn_bind && self.input.current() == Some(':') {
if self.session.cfg.fn_bind && self.input.current() == Some(':') {
self.input.bump();
return Ok(Some(tok!("::")));
}
@ -340,21 +266,21 @@ impl<I: Input> Lexer<I> {
}
// unexpected character
c => return Err(Error::UnexpectedChar { c, pos: start }),
c => syntax_error!(self, pos_span(start), SyntaxError::UnexpectedChar { c }),
};
Ok(Some(token))
}
/// Read an escaped charater for string literal.
fn read_escaped_char(&mut self, in_template: bool) -> Result<Option<char>, Error<I::Error>> {
fn read_escaped_char(&mut self, in_template: bool) -> LexResult<'a, Option<char>> {
assert_eq!(cur!(self), Some('\\'));
let start = cur_pos!(self);
bump!(self); // '\'
let c = match cur!(self) {
Some(c) => c,
None => return Err(Error::InvalidStrEscape { start }),
None => syntax_error!(self, pos_span(start), SyntaxError::InvalidStrEscape),
};
let c = match c {
'n' => '\n',
@ -435,8 +361,8 @@ impl<I: Input> Lexer<I> {
}
#[parser]
impl<I: Input> Lexer<I> {
fn read_slash(&mut self) -> Result<Option<Token>, Error<I::Error>> {
impl<'a, I: Input> Lexer<'a, I> {
fn read_slash(&mut self) -> LexResult<'a, Option<Token>> {
debug_assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
@ -456,14 +382,14 @@ impl<I: Input> Lexer<I> {
}))
}
fn read_token_lt_gt(&mut self) -> Result<Option<Token>, Error<I::Error>> {
fn read_token_lt_gt(&mut self) -> LexResult<'a, Option<Token>> {
assert!(cur!() == Some('<') || cur!() == Some('>'));
let c = cur!().unwrap();
bump!();
// XML style comment. `<!--`
if !self.opts.module && c == '<' && is!('!') && peek!() == Some('-')
if !self.session.cfg.module && c == '<' && is!('!') && peek!() == Some('-')
&& peek_ahead!() == Some('-')
{
self.skip_line_comment(3);
@ -502,20 +428,25 @@ impl<I: Input> Lexer<I> {
}
/// See https://tc39.github.io/ecma262/#sec-names-and-keywords
fn read_ident_or_keyword(&mut self) -> Result<Token, Error<I::Error>> {
fn read_ident_or_keyword(&mut self) -> LexResult<'a, Token> {
assert!(cur!().is_some());
let start = cur_pos!();
let (word, has_escape) = self.read_word_as_str()?;
// TODO: Use extension trait instead of into/from
let word = Word::from(word);
if has_escape && word.is_reserved_word(self.opts.strict) {
return Err(Error::EscapeInReservedWord { word });
if has_escape && word.is_reserved_word(self.session.cfg.strict) {
syntax_error!(
span!(start),
SyntaxError::EscapeInReservedWord { word: word.into() }
);
} else {
Ok(Word(word))
}
Ok(Word(word))
}
fn may_read_word_as_str(&mut self) -> Result<Option<(JsWord, bool)>, Error<I::Error>> {
fn may_read_word_as_str(&mut self) -> LexResult<'a, Option<(JsWord, bool)>> {
match cur!() {
Some(c) if c.is_ident_start() => self.read_word_as_str().map(Some),
_ => Ok(None),
@ -523,7 +454,7 @@ impl<I: Input> Lexer<I> {
}
/// returns (word, has_escape)
fn read_word_as_str(&mut self) -> Result<(JsWord, bool), Error<I::Error>> {
fn read_word_as_str(&mut self) -> LexResult<'a, (JsWord, bool)> {
assert!(cur!().is_some());
let mut has_escape = false;
@ -542,7 +473,7 @@ impl<I: Input> Lexer<I> {
'\\' => {
bump!();
if !is!('u') {
return Err(Error::ExpectedUnicodeEscape { pos: cur_pos!() });
syntax_error!(pos_span(start), SyntaxError::ExpectedUnicodeEscape);
}
let c = self.read_unicode_escape(start)?;
let valid = if first {
@ -552,7 +483,7 @@ impl<I: Input> Lexer<I> {
};
if !valid {
return Err(Error::InvalidIdentChar { pos: span!(start) });
syntax_error!(span!(start), SyntaxError::InvalidIdentChar);
}
word.push(c);
}
@ -566,7 +497,7 @@ impl<I: Input> Lexer<I> {
Ok((word.into(), has_escape))
}
fn read_unicode_escape(&mut self, start: BytePos) -> Result<char, Error<I::Error>> {
fn read_unicode_escape(&mut self, start: BytePos) -> LexResult<'a, char> {
assert_eq!(cur!(), Some('u'));
bump!();
@ -575,7 +506,7 @@ impl<I: Input> Lexer<I> {
let c = self.read_code_point()?;
if !eat!('}') {
return Err(Error::InvalidUnicodeEscape { pos: span!(start) });
syntax_error!(span!(start), SyntaxError::InvalidUnicodeEscape);
}
Ok(c)
@ -584,7 +515,7 @@ impl<I: Input> Lexer<I> {
}
}
fn read_hex_char(&mut self, count: u8) -> Result<char, Error<I::Error>> {
fn read_hex_char(&mut self, count: u8) -> LexResult<'a, char> {
debug_assert!(count == 2 || count == 4);
let pos = cur_pos!();
@ -598,20 +529,20 @@ impl<I: Input> Lexer<I> {
}
/// Read `CodePoint`.
fn read_code_point(&mut self) -> Result<char, Error<I::Error>> {
fn read_code_point(&mut self) -> LexResult<'a, char> {
let start = cur_pos!();
let val = self.read_int(16, 0)?;
match val {
Some(val) if 0x10FFFF >= val => match char::from_u32(val) {
Some(c) => Ok(c),
None => return Err(Error::InvalidCodePoint { pos: span!(start) }),
None => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
},
_ => return Err(Error::InvalidCodePoint { pos: span!(start) }),
_ => syntax_error!(span!(start), SyntaxError::InvalidCodePoint),
}
}
/// See https://tc39.github.io/ecma262/#sec-literals-string-literals
fn read_str_lit(&mut self) -> Result<Token, Error<I::Error>> {
fn read_str_lit(&mut self) -> LexResult<'a, Token> {
assert!(cur!() == Some('\'') || cur!() == Some('"'));
let start = cur_pos!();
let quote = cur!().unwrap();
@ -628,7 +559,9 @@ impl<I: Input> Lexer<I> {
return Ok(Str(out, c == '"'));
}
'\\' => out.extend(self.read_escaped_char(false)?),
c if c.is_line_break() => return Err(Error::UnterminatedStrLit { start }),
c if c.is_line_break() => {
syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
}
_ => {
out.push(c);
bump!();
@ -636,11 +569,11 @@ impl<I: Input> Lexer<I> {
}
}
Err(Error::UnterminatedStrLit { start })
syntax_error!(span!(start), SyntaxError::UnterminatedStrLit)
}
/// Expects current char to be '/'
fn read_regexp(&mut self) -> Result<Token, Error<I::Error>> {
fn read_regexp(&mut self) -> LexResult<'a, Token> {
assert_eq!(cur!(), Some('/'));
let start = cur_pos!();
bump!();
@ -653,7 +586,7 @@ impl<I: Input> Lexer<I> {
// This is ported from babel.
// Seems like regexp literal cannot contain linebreak.
if c.is_line_break() {
return Err(Error::UnterminatedRegxp { start });
syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
}
if escaped {
@ -674,7 +607,7 @@ impl<I: Input> Lexer<I> {
// input is terminated without following `/`
if cur!() != Some('/') {
return Err(Error::UnterminatedRegxp { start });
syntax_error!(span!(start), SyntaxError::UnterminatedRegxp);
}
bump!(); // '/'
@ -691,7 +624,7 @@ impl<I: Input> Lexer<I> {
Ok(Regex(content, flags))
}
fn read_tmpl_token(&mut self) -> Result<Token, Error<I::Error>> {
fn read_tmpl_token(&mut self) -> LexResult<'a, Token> {
let start = cur_pos!();
// TODO: Optimize
@ -735,16 +668,12 @@ impl<I: Input> Lexer<I> {
unimplemented!("error: unterminated template");
}
}
impl<I: Input> ::parser::Input for Lexer<I> {
fn had_line_break_before_last(&self) -> bool {
pub fn had_line_break_before_last(&self) -> bool {
self.state.had_line_break
}
}
impl<'a> Lexer<input::CharIndices<'a>> {
pub fn new_from_str(logger: Logger, s: &'a str) -> Self {
Lexer::new(logger, input::CharIndices(s.char_indices()))
}
/// Builds a zero-length `Span` at position `p`, for diagnostics that point
/// at a single position rather than a range.
fn pos_span(p: BytePos) -> Span {
Span::new(p, p, Default::default())
}

View File

@ -4,14 +4,15 @@
//! See https://tc39.github.io/ecma262/#sec-literals-numeric-literals
use super::*;
use error::SyntaxError;
use std::fmt::Display;
#[parser]
impl<I: Input> Lexer<I> {
impl<'a, I: Input> Lexer<'a, I> {
/// Reads an integer, octal integer, or floating-point number
///
///
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> Result<Number, Error<I::Error>> {
pub(super) fn read_number(&mut self, starts_with_dot: bool) -> LexResult<'a, Number> {
assert!(cur!().is_some());
if starts_with_dot {
debug_assert_eq!(
@ -43,15 +44,15 @@ impl<I: Input> Lexer<I> {
// e.g. `000` is octal
if start.0 != last_pos!().0 - 1 {
// `-1` is utf 8 length of `0`
return self.make_legacy_octal(start, 0f64);
}
} else {
// strict mode hates non-zero decimals starting with zero.
// e.g. 08.1 is strict mode violation but 0.1 is valid float.
if self.opts.strict {
return Err(Error::DecimalStartsWithZero { start });
if self.session.cfg.strict {
syntax_error!(span!(start), SyntaxError::LegacyDecimal);
}
let s = format!("{}", val); // TODO: Remove allocation.
@ -136,7 +137,7 @@ impl<I: Input> Lexer<I> {
Ok(Number(val))
}
pub(super) fn read_radix_number(&mut self, radix: u8) -> Result<Number, Error<I::Error>> {
pub(super) fn read_radix_number(&mut self, radix: u8) -> LexResult<'a, Number> {
debug_assert!(
radix == 2 || radix == 8 || radix == 16,
"radix should be one of 2, 8, 16, but got {}",
@ -155,7 +156,7 @@ impl<I: Input> Lexer<I> {
/// This can read long integers like
/// "13612536612375123612312312312312312312312".
fn read_number_no_dot(&mut self, radix: u8) -> Result<f64, Error<I::Error>> {
fn read_number_no_dot(&mut self, radix: u8) -> LexResult<'a, f64> {
debug_assert!(
radix == 2 || radix == 8 || radix == 10 || radix == 16,
"radix for read_number_no_dot should be one of 2, 8, 10, 16, but got {}",
@ -168,9 +169,11 @@ impl<I: Input> Lexer<I> {
}
/// Ensure that ident cannot directly follow numbers.
fn ensure_not_ident(&mut self) -> Result<(), Error<I::Error>> {
fn ensure_not_ident(&mut self) -> LexResult<'a, ()> {
match cur!() {
Some(c) if c.is_ident_start() => Err(Error::IdentAfterNum { pos: cur_pos!() }),
Some(c) if c.is_ident_start() => {
syntax_error!(pos_span(cur_pos!()), SyntaxError::IdentAfterNum)
}
_ => Ok(()),
}
}
@ -179,7 +182,7 @@ impl<I: Input> Lexer<I> {
/// were read, the integer value otherwise.
/// When `len` is not zero, this
/// will return `None` unless the integer has exactly `len` digits.
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> Result<Option<u32>, Error<I::Error>> {
pub(super) fn read_int(&mut self, radix: u8, len: u8) -> LexResult<'a, Option<u32>> {
let mut count = 0;
self.read_digits(radix, |opt: Option<u32>, radix, val| {
count += 1;
@ -189,7 +192,7 @@ impl<I: Input> Lexer<I> {
}
/// `op`- |total, radix, value| -> (total * radix + value, continue)
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> Result<Ret, Error<I::Error>>
fn read_digits<F, Ret>(&mut self, radix: u8, mut op: F) -> LexResult<'a, Ret>
where
F: FnMut(Ret, u8, u32) -> (Ret, bool),
Ret: Copy + Default,
@ -200,7 +203,7 @@ impl<I: Input> Lexer<I> {
radix
);
debug!(
self.logger,
self.session.logger,
"read_digits(radix = {}), cur = {:?}",
radix,
cur!(self)
@ -211,7 +214,7 @@ impl<I: Input> Lexer<I> {
let mut total: Ret = Default::default();
while let Some(c) = cur!() {
if self.opts.num_sep {
if self.session.cfg.num_sep {
// let prev: char = unimplemented!("prev");
// let next = self.input.peek();
@ -250,10 +253,10 @@ impl<I: Input> Lexer<I> {
Ok(total)
}
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> Result<Number, Error<I::Error>> {
fn make_legacy_octal(&mut self, start: BytePos, val: f64) -> LexResult<'a, Number> {
self.ensure_not_ident()?;
return if self.opts.strict {
Err(Error::ImplicitOctalOnStrict { start })
return if self.session.cfg.strict {
syntax_error!(span!(start), SyntaxError::LegacyOctal)
} else {
// FIXME
Ok(Number(val))
@ -268,23 +271,36 @@ mod tests {
use std::f64::INFINITY;
use std::panic;
fn lexer(s: &'static str) -> Lexer<CharIndices<'static>> {
let l = ::testing::logger().new(o!("src" => s));
Lexer::new_from_str(l, s)
fn lex<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Lexer<CharIndices>) -> Ret,
{
::with_test_sess(s, |sess| {
let mut l = Lexer::new(sess, CharIndices(s.char_indices()));
f(&mut l)
})
}
fn num(s: &'static str) -> f64 {
lexer(s)
.read_number(s.starts_with("."))
.expect("read_number failed")
.0
lex(s, |l| {
l.read_number(s.starts_with("."))
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.0
})
}
fn int(radix: u8, s: &'static str) -> u32 {
lexer(s)
.read_int(radix, 0)
.expect("read_int failed")
.expect("read_int returned None")
lex(s, |l| {
l.read_int(radix, 0)
.unwrap_or_else(|err| {
err.emit();
unreachable!()
})
.expect("read_int returned None")
})
}
const LONG: &str = "1e10000000000000000000000000000000000000000\
@ -327,7 +343,13 @@ mod tests {
#[test]
fn read_radix_number() {
assert_eq!(Ok(Number(0o73 as f64)), lexer("0o73").read_radix_number(8));
assert_eq!(
Number(0o73 as f64),
lex("0o73", |l| l.read_radix_number(8).unwrap_or_else(|err| {
err.emit();
unreachable!()
}))
);
}
/// Valid even on strict mode.
@ -337,12 +359,11 @@ mod tests {
fn test_floats(strict: bool, success: bool, cases: &'static [&'static str]) {
for case in cases {
let logger = ::testing::logger().new(o!("src" => case,
"strict" => strict,
"expected" => if success { "success" } else { "error" }
));
// lazy way to get expected value..
println!(
"Testing {} (when strict = {}); Expects success = {}",
case, strict, success
);
// lazy way to get expected values
let expected: f64 = (i64::from_str_radix(case, 8).map(|v| v as f64))
.or_else(|_| case.parse::<i64>().map(|v| v as f64))
.or_else(|_| case.parse::<f64>())
@ -350,15 +371,12 @@ mod tests {
let input = CharIndices(case.char_indices());
let vec = panic::catch_unwind(|| {
Lexer::new_with(
logger,
Options {
strict,
..Default::default()
},
input,
).map(|ts| ts.token)
.collect::<Vec<_>>()
::with_test_sess(case, |mut sess| {
sess.cfg.strict = strict;
Lexer::new(sess, input)
.map(|ts| ts.token)
.collect::<Vec<_>>()
})
});
if success {
@ -371,7 +389,7 @@ mod tests {
assert_eq!(Num(Number(expected)), token);
} else {
match vec {
Ok(vec) => assert!(vec![Num(Number(expected))] != vec),
Ok(vec) => assert_ne!(vec![Num(Number(expected))], vec),
_ => {}
}
}

View File

@ -1,7 +1,7 @@
use super::{Input, Lexer};
use parser_macros::parser;
use slog::Logger;
use swc_common::{BytePos, Span};
use swc_common::BytePos;
use token::*;
/// State of lexer.
@ -23,7 +23,7 @@ pub(super) struct State {
}
#[parser]
impl<I: Input> Iterator for Lexer<I> {
impl<'a, I: Input> Iterator for Lexer<'a, I> {
type Item = TokenAndSpan;
fn next(&mut self) -> Option<Self::Item> {
self.state.had_line_break = self.state.is_first;
@ -35,27 +35,31 @@ impl<I: Input> Iterator for Lexer<I> {
};
let start = cur_pos!();
if self.state.is_in_template() {
let token = self.read_tmpl_token()
.unwrap_or_else(|err| unimplemented!("error handling: {:?}", err));
self.state.update(&self.logger, &token);
return Some(TokenAndSpan {
token,
span: span!(start),
});
let res = if self.state.is_in_template() {
self.read_tmpl_token().map(Some)
} else {
self.read_token()
};
let token = res.unwrap_or_else(|err| {
// Report error
err.emit();
Some(Token::Error)
});
if let Some(ref token) = token {
self.state.update(&self.session.logger, &token)
}
if let Some(token) = self.read_token()
.unwrap_or_else(|err| unimplemented!("error handling: {:?}", err))
{
self.state.update(&self.logger, &token);
return Some(TokenAndSpan {
token.map(|token| {
// Attatch span to token.
TokenAndSpan {
token,
had_line_break: self.had_line_break_before_last(),
span: span!(start),
});
}
None
}
})
}
}

View File

@ -3,38 +3,53 @@ use super::input::CharIndices;
use std::ops::Range;
use std::str;
fn make_lexer(s: &'static str) -> Lexer<CharIndices<'static>> {
let logger = ::testing::logger().new(o!("src" => s));
Lexer::new_from_str(logger, s)
fn with_lexer<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Lexer<CharIndices>) -> Ret,
{
::with_test_sess(s, |sess| {
let mut l = Lexer::new(sess, CharIndices(s.char_indices()));
f(&mut l)
})
}
fn lex(s: &'static str) -> Vec<TokenAndSpan> {
println!("Source:\n{}", s);
let lexer = make_lexer(&s);
lexer.collect()
with_lexer(s, |l| l.collect())
}
fn lex_tokens(s: &'static str) -> Vec<Token> {
let lexer = make_lexer(&s);
lexer.map(|ts| ts.token).collect()
with_lexer(s, |l| l.map(|ts| ts.token).collect())
}
// Test helper: marks a `TokenAndSpan` as having been preceded by a line
// break (the first token of an input always is in these fixtures).
trait LineBreak: Into<TokenAndSpan> {
fn lb(self) -> TokenAndSpan {
TokenAndSpan {
had_line_break: true,
..self.into()
}
}
}
impl LineBreak for TokenAndSpan {}
// Test helper: converts position-like values (`usize`, `Range<usize>`)
// into a `Span` for concise fixture construction.
trait SpanRange: Sized {
fn into_span(self) -> Span;
}
impl SpanRange for usize {
fn into_span(self) -> Span {
Span {
start: BytePos(self as _),
end: BytePos((self + 1usize) as _),
}
Span::new(
BytePos(self as _),
BytePos((self + 1usize) as _),
Default::default(),
)
}
}
impl SpanRange for Range<usize> {
fn into_span(self) -> Span {
Span {
start: BytePos(self.start as _),
end: BytePos(self.end as _),
}
Span::new(
BytePos(self.start as _),
BytePos(self.end as _),
Default::default(),
)
}
}
@ -45,6 +60,7 @@ trait WithSpan: Sized {
{
TokenAndSpan {
token: self.into_token(),
had_line_break: false,
span: span.into_span(),
}
}
@ -95,7 +111,7 @@ impl WithSpan for AssignOpToken {
fn test262_lexer_error_0001() {
assert_eq!(
vec![
123f64.span(0..4),
123f64.span(0..4).lb(),
Dot.span(4..5),
"a".span(5..6),
LParen.span(6..7),
@ -110,7 +126,7 @@ fn test262_lexer_error_0001() {
fn test262_lexer_error_0002() {
assert_eq!(
vec![
Str("use strict".into(), false).span(0..15),
Str("use strict".into(), false).span(0..15).lb(),
Semi.span(15..16),
],
lex(r#"'use\x20strict';"#)
@ -119,7 +135,7 @@ fn test262_lexer_error_0002() {
#[test]
fn test262_lexer_error_0003() {
assert_eq!(vec!["a".span(0..6)], lex(r#"\u0061"#));
assert_eq!(vec!["a".span(0..6).lb()], lex(r#"\u0061"#));
}
#[test]
@ -132,26 +148,29 @@ fn test262_lexer_error_0004() {
#[test]
fn ident_escape_unicode() {
assert_eq!(vec!["aa".span(0..7)], lex(r#"a\u0061"#));
assert_eq!(vec!["aa".span(0..7).lb()], lex(r#"a\u0061"#));
}
#[test]
fn ident_escape_unicode_2() {
assert_eq!(lex("℘℘"), vec!["℘℘".span(0..6)]);
assert_eq!(lex("℘℘"), vec!["℘℘".span(0..6).lb()]);
assert_eq!(lex(r#"℘\u2118"#), vec!["℘℘".span(0..9)]);
assert_eq!(lex(r#"℘\u2118"#), vec!["℘℘".span(0..9).lb()]);
}
#[test]
fn str_escape_hex() {
assert_eq!(lex(r#"'\x61'"#), vec![Str("a".into(), false).span(0..6)]);
assert_eq!(
lex(r#"'\x61'"#),
vec![Str("a".into(), false).span(0..6).lb()]
);
}
#[test]
fn str_escape_octal() {
assert_eq!(
lex(r#"'Hello\012World'"#),
vec![Str("Hello\nWorld".into(), false).span(0..16)]
vec![Str("Hello\nWorld".into(), false).span(0..16).lb()]
)
}
@ -159,7 +178,7 @@ fn str_escape_octal() {
fn str_escape_unicode_long() {
assert_eq!(
lex(r#"'\u{00000000034}'"#),
vec![Str("4".into(), false).span(0..17)]
vec![Str("4".into(), false).span(0..17).lb()]
);
}
@ -167,12 +186,15 @@ fn str_escape_unicode_long() {
fn regexp_unary_void() {
assert_eq!(
lex("void /test/"),
vec![Void.span(0..4), Regex("test".into(), "".into()).span(5..11)]
vec![
Void.span(0..4).lb(),
Regex("test".into(), "".into()).span(5..11),
]
);
assert_eq!(
lex("void (/test/)"),
vec![
Void.span(0..4),
Void.span(0..4).lb(),
LParen.span(5..6),
Regex("test".into(), "".into()).span(6..12),
RParen.span(12..13),
@ -185,7 +207,7 @@ fn non_regexp_unary_plus() {
assert_eq!(
lex("+{} / 1"),
vec![
tok!('+').span(0..1),
tok!('+').span(0..1).lb(),
tok!('{').span(1..2),
tok!('}').span(2..3),
tok!('/').span(4..5),
@ -199,7 +221,7 @@ fn non_regexp_unary_plus() {
#[test]
fn invalid_but_lexable() {
assert_eq!(
vec![LParen.span(0), LBrace.span(1), Semi.span(2)],
vec![LParen.span(0).lb(), LBrace.span(1), Semi.span(2)],
lex("({;")
);
}
@ -207,7 +229,7 @@ fn invalid_but_lexable() {
#[test]
fn paren_semi() {
assert_eq!(
vec![LParen.span(0), RParen.span(1), Semi.span(2)],
vec![LParen.span(0).lb(), RParen.span(1), Semi.span(2)],
lex("();")
);
}
@ -216,7 +238,7 @@ fn paren_semi() {
fn ident_paren() {
assert_eq!(
vec![
"a".span(0),
"a".span(0).lb(),
LParen.span(1),
"bc".span(2..4),
RParen.span(4),
@ -228,21 +250,27 @@ fn ident_paren() {
#[test]
fn read_word() {
assert_eq!(vec!["a".span(0), "b".span(2), "c".span(4)], lex("a b c"),)
assert_eq!(
vec!["a".span(0).lb(), "b".span(2), "c".span(4)],
lex("a b c"),
)
}
#[test]
fn simple_regex() {
assert_eq!(
lex("x = /42/i"),
vec![
"x".span(0),
"x".span(0).lb(),
Assign.span(2),
Regex("42".into(), "i".into()).span(4..9),
],
lex("x = /42/i")
);
assert_eq!(vec![Regex("42".into(), "".into()).span(0..4)], lex("/42/"));
assert_eq!(
lex("/42/"),
vec![Regex("42".into(), "".into()).span(0..4).lb()]
);
}
#[test]
@ -268,7 +296,10 @@ fn complex_regex() {
#[test]
fn simple_div() {
assert_eq!(vec!["a".span(0), Div.span(2), "b".span(4)], lex("a / b"));
assert_eq!(
vec!["a".span(0).lb(), Div.span(2), "b".span(4)],
lex("a / b")
);
}
#[test]
@ -333,8 +364,9 @@ fn spec_001() {
#[test]
fn after_if() {
assert_eq!(
lex("if(x){} /y/.test(z)"),
vec![
Keyword::If.span(0..2),
Keyword::If.span(0..2).lb(),
LParen.span(2),
"x".span(3),
RParen.span(4),
@ -347,7 +379,6 @@ fn after_if() {
"z".span(17),
RParen.span(18),
],
lex("if(x){} /y/.test(z)"),
)
}
@ -393,7 +424,7 @@ fn invalid_number_failure() {
fn migrated_0002() {
assert_eq!(
vec![
"tokenize".span(0..8),
"tokenize".span(0..8).lb(),
LParen.span(8),
Regex("42".into(), "".into()).span(9..13),
RParen.span(13),
@ -406,7 +437,7 @@ fn migrated_0002() {
fn migrated_0003() {
assert_eq!(
vec![
LParen.span(0),
LParen.span(0).lb(),
Word::False.span(1..6),
RParen.span(6),
Div.span(8),
@ -421,7 +452,7 @@ fn migrated_0003() {
fn migrated_0004() {
assert_eq!(
vec![
Function.span(0..8),
Function.span(0..8).lb(),
"f".span(9),
LParen.span(10),
RParen.span(11),
@ -455,13 +486,13 @@ fn migrated_0004() {
fn migrated_0006() {
// This test seems wrong.
// assert_eq!(
// vec![LBrace.span(0), RBrace.span(1), Div.span(3), 42.span(4..6)],
// vec![LBrace.span(0).lb(), RBrace.span(1), Div.span(3), 42.span(4..6)],
// lex("{} /42")
// )
assert_eq!(
vec![
LBrace.span(0),
LBrace.span(0).lb(),
RBrace.span(1),
Regex("42".into(), "".into()).span(3..7),
],

View File

@ -13,15 +13,15 @@ use super::input::Input;
use parser_macros::parser;
use unicode_xid::UnicodeXID;
pub const BACKSPACE: char = 8 as char;
pub const SHIFT_OUT: char = 14 as char;
pub const OGHAM_SPACE_MARK: char = '\u{1680}'; // ''
pub const LINE_FEED: char = '\n';
pub const LINE_SEPARATOR: char = '\u{2028}';
pub const PARAGRAPH_SEPARATOR: char = '\u{2029}';
// pub const BACKSPACE: char = 8 as char;
// pub const SHIFT_OUT: char = 14 as char;
// pub const OGHAM_SPACE_MARK: char = '\u{1680}'; // ''
// pub const LINE_FEED: char = '\n';
// pub const LINE_SEPARATOR: char = '\u{2028}';
// pub const PARAGRAPH_SEPARATOR: char = '\u{2029}';
#[parser]
impl<I: Input> Lexer<I> {
impl<'a, I: Input> Lexer<'a, I> {
/// Skip comments or whitespaces.
///
/// See https://tc39.github.io/ecma262/#sec-white-space

View File

@ -3,7 +3,7 @@
#![feature(const_fn)]
#![feature(specialization)]
#![feature(never_type)]
#![feature(nll)]
// #![feature(nll)]
#![feature(proc_macro)]
#![feature(try_from)]
#![feature(try_trait)]
@ -12,25 +12,68 @@
#![deny(unsafe_code)]
extern crate either;
#[macro_use]
extern crate failure;
extern crate parser_macros;
#[macro_use]
extern crate slog;
#[macro_use(js_word)]
extern crate swc_atoms;
extern crate swc_common;
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecma_ast as ast;
#[macro_use]
extern crate swc_macros;
pub extern crate swc_macros;
#[cfg(test)]
#[macro_use]
extern crate testing;
extern crate unicode_xid;
pub use self::lexer::input::{CharIndices, Input};
pub use self::parser::*;
use slog::Logger;
use swc_common::errors::Handler;
#[macro_use]
mod macros;
pub mod error;
pub mod lexer;
pub mod token;
pub mod parser;
mod error;
mod lexer;
mod token;
mod parser;
/// Static configuration flags for the lexer/parser, carried in `Session`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Config {
// Enforce strict-mode restrictions (e.g. reject legacy octal/decimal
// literals and reserved-word identifiers).
pub strict: bool,
/// Support numeric separator.
pub num_sep: bool,
/// Support function bind expression.
pub fn_bind: bool,
/// Is in module code?
pub module: bool,
}
/// Shared per-parse context: configuration plus borrowed logger and error
/// handler. `Copy` so it can be passed around freely.
#[derive(Clone, Copy)]
pub struct Session<'a> {
pub cfg: Config,
pub logger: &'a Logger,
pub handler: &'a Handler,
}
/// Test helper: runs `f` with a throw-away `Session` whose diagnostics go
/// to a colorless TTY emitter and whose logger is tagged with `src`.
#[cfg(test)]
fn with_test_sess<F, Ret>(src: &'static str, f: F) -> Ret
where
F: FnOnce(Session) -> Ret,
{
// Handler/logger live on this stack frame; `Session` borrows them, so
// `f` must finish before they drop.
let handler = ::swc_common::errors::Handler::with_tty_emitter(
::swc_common::errors::ColorConfig::Never,
true,
false,
None,
);
let logger = ::testing::logger().new(o!("src" => src));
f(Session {
handler: &handler,
logger: &logger,
cfg: Default::default(),
})
}

View File

@ -39,10 +39,13 @@ macro_rules! tok {
("async") => { Token::Word(Word::Ident(js_word!("async"))) };
("as") => { Token::Word(Word::Ident(js_word!("as"))) };
("await") => { Token::Word(Keyword(Await)) };
("break") => { Token::Word(Keyword(Break)) };
("case") => { Token::Word(Keyword(Case)) };
("catch") => { Token::Word(Keyword(Catch)) };
("class") => { Token::Word(Keyword(Class)) };
("const") => { Token::Word(Keyword(Const)) };
("continue") => { Token::Word(Keyword(Continue)) };
("debugger") => { Token::Word(Keyword(Debugger)) };
("default") => { Token::Word(Keyword(Default_)) };
("delete") => { Token::Word(Keyword(Delete)) };
("do") => { Token::Word(Keyword(Do)) };
@ -90,9 +93,9 @@ macro_rules! span {
let end: ::swc_common::BytePos = last_pos!($p);
if cfg!(debug_assertions) && start > end {
unreachable!("assertion failed: (span.start <= span.end).
start = {}, end = {}", start, end)
start = {}, end = {}", start.0, end.0)
}
Span { start, end }
::swc_common::Span::new(start, end, Default::default())
}};
}
@ -100,17 +103,35 @@ macro_rules! spanned {
(
$p:expr, { $($body:tt)* }
) => {{
let start = cur_pos!($p);
let start = { cur_pos!($p) };
let val: Result<_, _> = {
$($body)*
};
#[allow(unreachable_code)]
match val {
Ok(val) => {
let end = last_pos!($p);
Ok(::swc_common::Spanned::from_unspanned(val, Span { start, end }))
},
Err(err) => Err(err),
{
match val {
Ok(val) => {
let span = span!($p, start);
let val = ::swc_common::Spanned::from_unspanned(val, span);
Ok(val)
},
Err(err) => Err(err),
}
}
}};
}
/// Builds an `error::Error` from the current session state and returns it
/// early from the enclosing function via `?`.
macro_rules! syntax_error {
($p:expr, $err:expr) => {{
// Two-argument form: span defaults to the current token's span.
syntax_error!($p, $p.input.cur_span(), $err)
}};
($p:expr, $span:expr, $err:expr) => {{
let err = $crate::error::Error {
handler: $p.session.handler,
span: $span,
error: $err,
};
// `Result<!, _>` is always `Err`, so `res?` always diverges — the
// macro never falls through to produce a value.
let res: Result<!, _> = Err(err);
res?
}};
}

View File

@ -4,54 +4,54 @@ use super::*;
use super::ident::MaybeOptionalIdentParser;
#[parser]
impl<I: Input> Parser<I> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<Box<Expr>> {
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_async_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
/// Parse function expression
pub(super) fn parse_fn_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_fn_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_fn(None)
}
pub(super) fn parse_async_fn_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_async_fn_decl(&mut self) -> PResult<'a, Decl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
pub(super) fn parse_fn_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_fn_decl(&mut self) -> PResult<'a, Decl> {
self.parse_fn(None)
}
pub(super) fn parse_default_async_fn(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_async_fn(&mut self) -> PResult<'a, ExportDefaultDecl> {
let start = cur_pos!();
expect!("async");
self.parse_fn(Some(start))
}
pub(super) fn parse_default_fn(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_fn(&mut self) -> PResult<'a, ExportDefaultDecl> {
self.parse_fn(None)
}
pub(super) fn parse_class_decl(&mut self) -> PResult<Decl> {
pub(super) fn parse_class_decl(&mut self) -> PResult<'a, Decl> {
self.parse_class()
}
pub(super) fn parse_class_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_class_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_class()
}
pub(super) fn parse_default_class(&mut self) -> PResult<ExportDefaultDecl> {
pub(super) fn parse_default_class(&mut self) -> PResult<'a, ExportDefaultDecl> {
self.parse_class()
}
fn parse_class<T>(&mut self) -> PResult<T>
fn parse_class<T>(&mut self) -> PResult<'a, T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<T::Ident>,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
{
let start = cur_pos!();
expect!("class");
@ -71,14 +71,14 @@ impl<I: Input> Parser<I> {
Ok(T::finish_class(
ident,
Class {
span: Span { start, end },
span: Span::new(start, end, Default::default()),
super_class,
body,
},
))
}
fn parse_class_body(&mut self) -> PResult<Vec<ClassMethod>> {
fn parse_class_body(&mut self) -> PResult<'a, Vec<ClassMethod>> {
let mut elems = vec![];
while !eof!() && !is!('}') {
if eat_exact!(';') {
@ -90,7 +90,7 @@ impl<I: Input> Parser<I> {
Ok(elems)
}
fn parse_class_element(&mut self) -> PResult<ClassMethod> {
fn parse_class_element(&mut self) -> PResult<'a, ClassMethod> {
// ignore semi
let start_of_static = {
@ -105,17 +105,17 @@ impl<I: Input> Parser<I> {
self.parse_method_def(start_of_static)
}
fn parse_fn<T>(&mut self, start_of_async: Option<BytePos>) -> PResult<T>
fn parse_fn<T>(&mut self, start_of_async: Option<BytePos>) -> PResult<'a, T>
where
T: OutputType,
Self: MaybeOptionalIdentParser<T::Ident>,
Self: MaybeOptionalIdentParser<'a, T::Ident>,
{
let start = start_of_async.unwrap_or(cur_pos!());
assert_and_bump!("function");
let is_async = start_of_async.is_some();
if is_async && is!('*') {
syntax_error!(SyntaxError::AsyncGenerator);
syntax_error!(SyntaxError::AsyncGenerator {});
}
let is_generator = eat!('*');
@ -131,11 +131,7 @@ impl<I: Input> Parser<I> {
Ok(T::finish_fn(
ident,
Function {
span: Span {
start,
end: last_pos!(),
},
span: span!(start),
is_async,
is_generator,
params,
@ -151,25 +147,26 @@ impl<I: Input> Parser<I> {
parse_args: F,
is_async: bool,
is_generator: bool,
) -> PResult<Function>
) -> PResult<'a, Function>
where
F: FnOnce(&mut Self) -> PResult<Vec<Pat>>,
F: FnOnce(&mut Self) -> PResult<'a, Vec<Pat>>,
{
self.with_ctx(Context {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
}).parse_with(|mut p| {
expect!(p, '(');
};
self.with_ctx(ctx).parse_with(|mut p| {
expect!('(');
let params = parse_args(&mut p)?;
expect!(p, ')');
expect!(')');
let body = p.parse_fn_body(is_async, is_generator)?;
Ok(Function {
span: span!(p, start),
span: span!(start),
params,
body,
is_async,
@ -178,7 +175,7 @@ impl<I: Input> Parser<I> {
})
}
fn parse_method_def(&mut self, start_of_static: Option<BytePos>) -> PResult<ClassMethod> {
fn parse_method_def(&mut self, start_of_static: Option<BytePos>) -> PResult<'a, ClassMethod> {
let is_static = start_of_static.is_some();
let start = start_of_static.unwrap_or(cur_pos!());
@ -277,15 +274,16 @@ impl<I: Input> Parser<I> {
}
}
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<T>
pub(super) fn parse_fn_body<T>(&mut self, is_async: bool, is_generator: bool) -> PResult<'a, T>
where
Self: FnBodyParser<T>,
Self: FnBodyParser<'a, T>,
{
self.with_ctx(Context {
let ctx = Context {
in_async: is_async,
in_generator: is_generator,
..self.ctx
}).parse_fn_body_inner()
};
self.with_ctx(ctx).parse_fn_body_inner()
}
}
@ -335,13 +333,13 @@ impl OutputType for Decl {
}
}
pub(super) trait FnBodyParser<Body> {
fn parse_fn_body_inner(&mut self) -> PResult<Body>;
pub(super) trait FnBodyParser<'a, Body> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, Body>;
}
#[parser]
impl<I: Input> FnBodyParser<BlockStmtOrExpr> for Parser<I> {
fn parse_fn_body_inner(&mut self) -> PResult<BlockStmtOrExpr> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmtOrExpr> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmtOrExpr> {
if is!('{') {
self.parse_block().map(BlockStmtOrExpr::BlockStmt)
} else {
@ -350,8 +348,8 @@ impl<I: Input> FnBodyParser<BlockStmtOrExpr> for Parser<I> {
}
}
impl<I: Input> FnBodyParser<BlockStmt> for Parser<I> {
fn parse_fn_body_inner(&mut self) -> PResult<BlockStmt> {
impl<'a, I: Input> FnBodyParser<'a, BlockStmt> for Parser<'a, I> {
fn parse_fn_body_inner(&mut self) -> PResult<'a, BlockStmt> {
self.parse_block()
}
}
@ -359,25 +357,28 @@ impl<I: Input> FnBodyParser<BlockStmt> for Parser<I> {
#[cfg(test)]
mod tests {
use super::*;
use lexer::Lexer;
fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
let logger = ::testing::logger().new(o!("src" => s));
Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
}
use swc_common::DUMMY_SP;
fn lhs(s: &'static str) -> Box<Expr> {
mk(s)
.parse_lhs_expr()
.expect("failed to parse lhs expression")
test_parser(s, |p| {
p.parse_lhs_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse a left-hand-side expression")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
mk(s).parse_expr().expect("failed to parse an expression")
test_parser(s, |p| {
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
#[allow(non_upper_case_globals)]
const span: Span = Span::DUMMY;
const span: Span = DUMMY_SP;
#[test]
fn class_expr() {

View File

@ -6,19 +6,19 @@ mod ops;
mod tests;
#[parser]
impl<I: Input> Parser<I> {
pub fn parse_expr(&mut self) -> PResult<Box<Expr>> {
impl<'a, I: Input> Parser<'a, I> {
pub fn parse_expr(&mut self) -> PResult<'a, Box<Expr>> {
let expr = self.parse_assignment_expr()?;
let start = expr.span.start;
let start = expr.span.lo();
if is!(',') {
let mut exprs = vec![expr];
while eat!(',') {
exprs.push(self.parse_assignment_expr()?);
}
let end = exprs.last().unwrap().span.end;
let end = exprs.last().unwrap().span.hi();
return Ok(box Expr {
span: Span { start, end },
span: Span::new(start, end, Default::default()),
node: ExprKind::Seq(SeqExpr { exprs }),
});
}
@ -29,7 +29,7 @@ impl<I: Input> Parser<I> {
/// Parse an assignment expression. This includes applications of
/// operators like `+=`.
///
pub(super) fn parse_assignment_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_assignment_expr(&mut self) -> PResult<'a, Box<Expr>> {
if self.ctx.in_generator && is!("yield") {
return self.parse_yield_expr();
}
@ -59,7 +59,7 @@ impl<I: Input> Parser<I> {
}
match cur!() {
Some(&AssignOp(op)) => {
Ok(&AssignOp(op)) => {
bump!();
let right = self.parse_assignment_expr()?;
Ok(box Expr {
@ -77,7 +77,7 @@ impl<I: Input> Parser<I> {
}
/// Spec: 'ConditionalExpression'
fn parse_cond_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_cond_expr(&mut self) -> PResult<'a, Box<Expr>> {
spanned!({
let test = self.parse_bin_expr()?;
return_if_arrow!(test);
@ -95,79 +95,87 @@ impl<I: Input> Parser<I> {
}
/// Parse a primary expression or arrow function
fn parse_primary_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_primary_expr(&mut self) -> PResult<'a, Box<Expr>> {
let can_be_arrow = self.state
.potential_arrow_start
.map(|s| s == cur_pos!())
.unwrap_or(false);
// debug!(
// self.logger,
// "Parsing a primary expression. cur={:?} can_be_arrow={}",
// cur!(),
// can_be_arrow
// );
if is!("this") {
return self.spanned(|p| {
assert_and_bump!("this");
Ok(ExprKind::This)
});
}
let t = cur!()?;
match *t {
tok!("this") => {
return spanned!({
assert_and_bump!("this");
Ok(ExprKind::This)
});
// Handle async function expression
if { is!("async") } && { peeked_is!("function") } && {
!self.input.has_linebreak_between_cur_and_peeked()
} {
return self.parse_async_fn_expr();
}
if is!('[') {
return self.parse_array_lit();
}
if is!('{') {
return self.parse_object();
}
// Handle FunctionExpression and GeneratorExpression
if is!("function") {
return self.parse_fn_expr();
}
if is!("class") {
return self.parse_class_expr();
}
// Literals
if {
match *cur!()? {
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => true,
_ => false,
}
} {
return self.spanned(|p| p.parse_lit().map(ExprKind::Lit));
}
tok!("async") => {
// Handle async function expression
if peeked_is!("function") && !self.input.has_linebreak_between_cur_and_peeked() {
return self.parse_async_fn_expr();
}
// Regexp
if {
match *cur!()? {
Regex(..) => true,
_ => false,
}
} {
return self.spanned(|p| match bump!() {
Regex(exp, flags) => Ok(ExprKind::Lit(Lit::Regex(Regex { exp, flags }))),
_ => unreachable!(),
});
}
tok!("null") | tok!("true") | tok!("false") | Num(..) | Str(..) => {
return spanned!({ self.parse_lit().map(ExprKind::Lit) })
}
tok!('[') => return self.parse_array_lit(),
tok!('{') => return self.parse_object(),
if is!('`') {
return self.spanned(|p| {
// parse template literal
Ok(ExprKind::Tpl(p.parse_tpl_lit(None)?))
});
}
// Handle FunctionExpression and GeneratorExpression
tok!("function") => return self.parse_fn_expr(),
tok!("class") => return self.parse_class_expr(),
Regex(_, _) => {
return spanned!({
match bump!() {
Regex(exp, flags) => Ok(ExprKind::Lit(Lit::Regex(Regex { exp, flags }))),
_ => unreachable!(),
}
});
}
tok!('`') => {
return spanned!({
// parse template literal
Ok(ExprKind::Tpl(self.parse_tpl_lit(None)?))
});
}
tok!('(') => {
return self.parse_paren_expr_or_arrow_fn(can_be_arrow);
}
_ => {}
if is!('(') {
return self.parse_paren_expr_or_arrow_fn(can_be_arrow);
}
if is!("let") || is!(IdentRef) {
return spanned!({
return self.spanned(|p| {
// TODO: Handle [Yield, Await]
let id = self.parse_ident_ref()?;
let id = p.parse_ident_ref()?;
if can_be_arrow && id.sym == js_word!("async") && is!(BindingIdent) {
// async a => body
let arg = self.parse_binding_ident().map(Pat::from)?;
let arg = p.parse_binding_ident().map(Pat::from)?;
let params = vec![arg];
expect!("=>");
let body = self.parse_fn_body(true, false)?;
let body = p.parse_fn_body(true, false)?;
Ok(ExprKind::Arrow(ArrowExpr {
body,
params,
@ -178,7 +186,7 @@ impl<I: Input> Parser<I> {
// async is parameter
let params = vec![id.into()];
let body = self.parse_fn_body(false, false)?;
let body = p.parse_fn_body(false, false)?;
Ok(ExprKind::Arrow(ArrowExpr {
body,
params,
@ -186,7 +194,7 @@ impl<I: Input> Parser<I> {
is_generator: false,
}))
} else {
return Ok(id.into());
return Ok(ExprKind::Ident(id.into()));
}
});
}
@ -194,8 +202,8 @@ impl<I: Input> Parser<I> {
unexpected!()
}
fn parse_array_lit(&mut self) -> PResult<Box<Expr>> {
spanned!({
fn parse_array_lit(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!('[');
let mut elems = vec![];
let mut comma = 1;
@ -212,7 +220,7 @@ impl<I: Input> Parser<I> {
}
elems.extend(iter::repeat(None).take(comma - 1));
comma = 0;
elems.push(self.include_in_expr(true).parse_expr_or_spread().map(Some)?);
elems.push(p.include_in_expr(true).parse_expr_or_spread().map(Some)?);
}
expect!(']');
@ -221,12 +229,12 @@ impl<I: Input> Parser<I> {
})
}
fn parse_member_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_member_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_member_expr_or_new_expr(false)
}
/// `is_new_expr`: true iff we are parsing production 'NewExpression'.
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<Box<Expr>> {
fn parse_member_expr_or_new_expr(&mut self, is_new_expr: bool) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
if eat!("new") {
let span_of_new = span!(start);
@ -257,14 +265,14 @@ impl<I: Input> Parser<I> {
// Parsed with 'MemberExpression' production.
let args = self.parse_args().map(Some)?;
let new_expr = ExprOrSuper::Expr(box Expr {
span: span!(start),
node: ExprKind::New(NewExpr { callee, args }),
});
// We should parse subscripts for MemberExpression.
return self.parse_subscripts(
ExprOrSuper::Expr(box Expr {
span: span!(start),
node: ExprKind::New(NewExpr { callee, args }),
}),
true,
);
// Because it's left recursive.
return self.parse_subscripts(new_expr, true);
}
// Parsed with 'NewExpression' production.
@ -285,12 +293,12 @@ impl<I: Input> Parser<I> {
/// Parse `NewExpresion`.
/// This includes `MemberExpression`.
fn parse_new_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_new_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_member_expr_or_new_expr(true)
}
/// Parse `Arguments[Yield, Await]`
pub(super) fn parse_args(&mut self) -> PResult<Vec<ExprOrSpread>> {
pub(super) fn parse_args(&mut self) -> PResult<'a, Vec<ExprOrSpread>> {
expect!('(');
let mut first = true;
@ -316,7 +324,7 @@ impl<I: Input> Parser<I> {
/// AssignmentExpression[+In, ?Yield, ?Await]
/// ...AssignmentExpression[+In, ?Yield, ?Await]
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<ExprOrSpread> {
pub(super) fn parse_expr_or_spread(&mut self) -> PResult<'a, ExprOrSpread> {
if eat!("...") {
self.include_in_expr(true)
.parse_assignment_expr()
@ -328,7 +336,7 @@ impl<I: Input> Parser<I> {
/// Parse paren expression or arrow function expression.
///
fn parse_paren_expr_or_arrow_fn(&mut self, can_be_arrow: bool) -> PResult<Box<Expr>> {
fn parse_paren_expr_or_arrow_fn(&mut self, can_be_arrow: bool) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// At this point, we can't know if it's parenthesized
@ -363,7 +371,7 @@ impl<I: Input> Parser<I> {
// ParenthesizedExpression cannot contain spread.
if expr_or_spreads.len() == 0 {
syntax_error!(SyntaxError::EmptyParenExpr)
syntax_error!(SyntaxError::EmptyParenExpr);
} else if expr_or_spreads.len() == 1 {
let expr = match expr_or_spreads.into_iter().next().unwrap() {
ExprOrSpread::Spread(_) => syntax_error!(SyntaxError::SpreadInParenExpr),
@ -387,10 +395,11 @@ impl<I: Input> Parser<I> {
// span of sequence expression should not include '(' and ')'
let seq_expr = box Expr {
span: Span {
start: exprs.first().unwrap().span.start,
end: exprs.last().unwrap().span.end,
},
span: Span::new(
exprs.first().unwrap().span.lo(),
exprs.last().unwrap().span.hi(),
Default::default(),
),
node: ExprKind::Seq(SeqExpr { exprs }),
};
return Ok(box Expr {
@ -400,7 +409,7 @@ impl<I: Input> Parser<I> {
}
}
fn parse_tpl_lit(&mut self, tag: Option<Box<Expr>>) -> PResult<TplLit> {
fn parse_tpl_lit(&mut self, tag: Option<Box<Expr>>) -> PResult<'a, TplLit> {
assert_and_bump!('`');
let is_tagged = tag.is_some();
@ -425,7 +434,7 @@ impl<I: Input> Parser<I> {
Ok(TplLit { tag, exprs, quasis })
}
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<TplElement> {
fn parse_tpl_element(&mut self, is_tagged: bool) -> PResult<'a, TplElement> {
let raw = match *cur!()? {
Template(_) => match bump!() {
Template(s) => s,
@ -443,7 +452,7 @@ impl<I: Input> Parser<I> {
})
}
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<Box<Expr>> {
fn parse_subscripts(&mut self, mut obj: ExprOrSuper, no_call: bool) -> PResult<'a, Box<Expr>> {
loop {
obj = match self.parse_subscript(obj, no_call)? {
(expr, false) => return Ok(expr),
@ -453,7 +462,11 @@ impl<I: Input> Parser<I> {
}
/// returned bool is true if this method should be called again.
fn parse_subscript(&mut self, obj: ExprOrSuper, no_call: bool) -> PResult<(Box<Expr>, bool)> {
fn parse_subscript(
&mut self,
obj: ExprOrSuper,
no_call: bool,
) -> PResult<'a, (Box<Expr>, bool)> {
let start = cur_pos!();
// member expression
// $obj.name
@ -527,7 +540,7 @@ impl<I: Input> Parser<I> {
/// Parse call, dot, and `[]`-subscript expressions.
///
///
pub(super) fn parse_lhs_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_lhs_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// `super()` can't be handled from parse_new_expr()
@ -541,9 +554,10 @@ impl<I: Input> Parser<I> {
match callee.node {
// If this is parsed using 'NewExpression' rule, just return it.
// Because it's not left-recursive.
ExprKind::New(NewExpr { args: None, .. }) => {
assert_ne!(
cur!(),
cur!().ok(),
Some(&LParen),
"parse_new_expr() should eat paren if it exists"
);
@ -574,21 +588,21 @@ impl<I: Input> Parser<I> {
Ok(callee)
}
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_expr_or_pat(&mut self) -> PResult<'a, Box<Expr>> {
self.parse_expr()
}
pub(super) fn parse_args_or_pats(&mut self) -> PResult<Vec<ExprOrSpread>> {
pub(super) fn parse_args_or_pats(&mut self) -> PResult<'a, Vec<ExprOrSpread>> {
self.parse_args()
}
}
/// simple leaf methods.
#[parser]
impl<I: Input> Parser<I> {
fn parse_yield_expr(&mut self) -> PResult<Box<Expr>> {
spanned!({
impl<'a, I: Input> Parser<'a, I> {
fn parse_yield_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("yield");
assert!(self.ctx.in_generator);
assert!(p.ctx.in_generator);
//TODO
// Spec says
@ -603,7 +617,7 @@ impl<I: Input> Parser<I> {
}))
} else {
let has_star = eat!('*');
let arg = self.parse_assignment_expr()?;
let arg = p.parse_assignment_expr()?;
Ok(ExprKind::Yield(YieldExpr {
arg: Some(arg),
@ -614,7 +628,7 @@ impl<I: Input> Parser<I> {
}
/// 12.2.5 Array Initializer
fn parse_lit(&mut self) -> PResult<Lit> {
fn parse_lit(&mut self) -> PResult<'a, Lit> {
let v = match *cur!()? {
Word(Null) => {
bump!();

View File

@ -3,9 +3,9 @@
use super::*;
#[parser]
impl<I: Input> Parser<I> {
impl<'a, I: Input> Parser<'a, I> {
/// Name from spec: 'LogicalORExpression'
pub(super) fn parse_bin_expr(&mut self) -> PResult<Box<Expr>> {
pub(super) fn parse_bin_expr(&mut self) -> PResult<'a, Box<Expr>> {
let left = self.parse_unary_expr()?;
return_if_arrow!(left);
@ -17,12 +17,16 @@ impl<I: Input> Parser<I> {
/// `minPrec` provides context that allows the function to stop and
/// defer further parser to one of its callers when it encounters an
/// operator that has a lower precedence than the set it is parsing.
fn parse_bin_op_recursively(&mut self, left: Box<Expr>, min_prec: u8) -> PResult<Box<Expr>> {
fn parse_bin_op_recursively(
&mut self,
left: Box<Expr>,
min_prec: u8,
) -> PResult<'a, Box<Expr>> {
let op = match {
// Return left on eof
match cur!() {
Some(cur) => cur,
None => return Ok(left),
Ok(cur) => cur,
Err(..) => return Ok(left),
}
} {
&Word(Keyword(In)) if self.ctx.include_in_expr => op!("in"),
@ -35,7 +39,7 @@ impl<I: Input> Parser<I> {
if op.precedence() <= min_prec {
trace!(
self.logger,
self.session.logger,
"returning {:?} without parsing {:?} because min_prec={}, prec={}",
left,
op,
@ -47,7 +51,7 @@ impl<I: Input> Parser<I> {
}
bump!();
trace!(
self.logger,
self.session.logger,
"parsing binary op {:?} min_prec={}, prec={}",
op,
min_prec,
@ -61,7 +65,11 @@ impl<I: Input> Parser<I> {
// returning "unexpected token '**'" on next.
// But it's not useful error message.
syntax_error!(SyntaxError::UnaryInExp)
syntax_error!(SyntaxError::UnaryInExp {
// FIXME: Use display
left: format!("{:?}", left),
left_span: left.span,
})
}
_ => {}
}
@ -80,7 +88,7 @@ impl<I: Input> Parser<I> {
};
let node = box Expr {
span: span!(left.span.start),
span: span!(left.span.lo()),
node: ExprKind::Bin(BinExpr { op, left, right }),
};
@ -91,7 +99,7 @@ impl<I: Input> Parser<I> {
/// Parse unary expression and update expression.
///
/// spec: 'UnaryExpression'
fn parse_unary_expr(&mut self) -> PResult<Box<Expr>> {
fn parse_unary_expr(&mut self) -> PResult<'a, Box<Expr>> {
let start = cur_pos!();
// Parse update expression
@ -165,16 +173,16 @@ impl<I: Input> Parser<I> {
Ok(expr)
}
fn parse_await_expr(&mut self) -> PResult<Box<Expr>> {
spanned!({
fn parse_await_expr(&mut self) -> PResult<'a, Box<Expr>> {
self.spanned(|p| {
assert_and_bump!("await");
assert!(self.ctx.in_async);
assert!(p.ctx.in_async);
if is!('*') {
syntax_error!(SyntaxError::AwaitStar)
syntax_error!(SyntaxError::AwaitStar);
}
let arg = self.parse_unary_expr()?;
let arg = p.parse_unary_expr()?;
Ok(ExprKind::Await(AwaitExpr { arg }))
})
}
@ -183,17 +191,14 @@ impl<I: Input> Parser<I> {
#[cfg(test)]
mod tests {
use super::*;
use lexer::Lexer;
fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
let logger = ::testing::logger().new(o!("src" => s));
Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
}
fn bin(s: &'static str) -> Box<Expr> {
let expr = mk(s).parse_bin_expr();
expr.unwrap_or_else(|err| {
panic!("failed to parse '{}' as a binary expression: {:?}", s, err)
test_parser(s, |p| {
p.parse_bin_expr().unwrap_or_else(|err| {
err.emit();
panic!("failed to parse '{}' as a binary expression", s)
})
})
}

View File

@ -1,35 +1,53 @@
use super::*;
use lexer::Lexer;
fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
let logger = ::testing::logger().new(o!("src" => s));
Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
}
use swc_common::DUMMY_SP;
fn lhs(s: &'static str) -> Box<Expr> {
mk(s)
.parse_lhs_expr()
.expect("failed to parse lhs expression")
test_parser(s, |p| {
p.parse_lhs_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse lhs expression")
})
})
}
fn new_expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_new_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
fn member_expr(s: &'static str) -> Box<Expr> {
test_parser(s, |p| {
p.parse_member_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
mk(s).parse_expr().expect("failed to parse an expression")
test_parser(s, |p| {
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
#[allow(non_upper_case_globals)]
const span: Span = Span::DUMMY;
const span: Span = DUMMY_SP;
#[test]
fn new_expr_should_not_eat_too_much() {
assert_eq_ignore_span!(
mk("new Date().toString()").parse_new_expr().unwrap(),
new_expr("new Date().toString()"),
box Expr {
span: Default::default(),
node: ExprKind::Member(MemberExpr {
obj: mk("new Date()")
.parse_member_expr()
.map(ExprOrSuper::Expr)
.unwrap(),
obj: ExprOrSuper::Expr(member_expr("new Date()")),
prop: Ident {
sym: "toString".into(),
span: Default::default(),

View File

@ -3,16 +3,16 @@
use super::*;
#[parser]
impl<I: Input> Parser<I> {
impl<'a, I: Input> Parser<'a, I> {
/// IdentifierReference
pub(super) fn parse_ident_ref(&mut self) -> PResult<Ident> {
pub(super) fn parse_ident_ref(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
self.parse_ident(!ctx.in_generator, !ctx.in_async)
}
/// LabelIdentifier
pub(super) fn parse_label_ident(&mut self) -> PResult<Ident> {
pub(super) fn parse_label_ident(&mut self) -> PResult<'a, Ident> {
let ctx = self.ctx;
self.parse_ident(!ctx.in_generator, !ctx.in_async)
@ -20,10 +20,10 @@ impl<I: Input> Parser<I> {
/// Use this when spec says "IdentiferName".
/// This allows idents like `catch`.
pub(super) fn parse_ident_name(&mut self) -> PResult<Ident> {
spanned!({
pub(super) fn parse_ident_name(&mut self) -> PResult<'a, Ident> {
self.spanned(|p| {
let w = match cur!() {
Some(&Word(..)) => match bump!() {
Ok(&Word(..)) => match bump!() {
Word(w) => w,
_ => unreachable!(),
},
@ -37,11 +37,11 @@ impl<I: Input> Parser<I> {
/// Identifier
///
/// In strict mode, "yield" is SyntaxError if matched.
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<Ident> {
spanned!({
let strict = self.ctx.strict;
pub(super) fn parse_ident(&mut self, incl_yield: bool, incl_await: bool) -> PResult<'a, Ident> {
self.spanned(|p| {
let strict = p.session.cfg.strict;
let w = match cur!() {
Some(&Word(..)) => match bump!() {
Ok(&Word(..)) => match bump!() {
Word(w) => w,
_ => unreachable!(),
},
@ -82,24 +82,22 @@ impl<I: Input> Parser<I> {
Ident(ident) => Ok(ident),
Keyword(Yield) if incl_yield => Ok(js_word!("yield")),
Keyword(Await) if incl_await => Ok(js_word!("await")),
Keyword(..) | Null | True | False => {
syntax_error!(SyntaxError::ExpectedIdent)
}
Keyword(..) | Null | True | False => syntax_error!(SyntaxError::ExpectedIdent),
}
})
}
}
pub(super) trait MaybeOptionalIdentParser<Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident>;
pub(super) trait MaybeOptionalIdentParser<'a, Ident> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident>;
}
impl<I: Input> MaybeOptionalIdentParser<Ident> for Parser<I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Ident> {
impl<'a, I: Input> MaybeOptionalIdentParser<'a, Ident> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Ident> {
self.parse_binding_ident()
}
}
impl<I: Input> MaybeOptionalIdentParser<Option<Ident>> for Parser<I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
impl<'a, I: Input> MaybeOptionalIdentParser<'a, Option<Ident>> for Parser<'a, I> {
fn parse_maybe_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
self.parse_opt_binding_ident()
}
}

View File

@ -1,62 +1,33 @@
//! Note: this module requires `#![feature(nll)]`.
use swc_common::{BytePos, Span};
use lexer::{Input, Lexer};
use swc_common::{BytePos, Span, DUMMY_SP};
use token::*;
/// Input for parser.
pub trait Input: Iterator<Item = TokenAndSpan> {
fn had_line_break_before_last(&self) -> bool;
}
/// This struct is responsible for managing current token and peeked token.
pub(super) struct ParserInput<I: Input> {
iter: ItemIter<I>,
cur: Option<Item>,
/// Last of previous span
last_pos: BytePos,
pub(super) struct ParserInput<'a, I: Input> {
iter: Lexer<'a, I>,
/// Span of the previous token.
last_span: Span,
cur: Option<TokenAndSpan>,
/// Peeked token
next: Option<Item>,
next: Option<TokenAndSpan>,
}
/// One token
#[derive(Debug)]
struct Item {
token: Token,
/// Had a line break before this token?
had_line_break: bool,
span: Span,
}
struct ItemIter<I: Input>(I);
impl<I: Input> ItemIter<I> {
fn next(&mut self) -> Option<Item> {
match self.0.next() {
Some(TokenAndSpan { token, span }) => Some(Item {
token,
span,
had_line_break: self.0.had_line_break_before_last(),
}),
None => None,
}
}
}
impl<I: Input> ParserInput<I> {
pub const fn new(lexer: I) -> Self {
impl<'a, I: Input> ParserInput<'a, I> {
pub fn new(lexer: Lexer<'a, I>) -> Self {
ParserInput {
iter: ItemIter(lexer),
iter: lexer,
cur: None,
last_pos: BytePos(0),
last_span: DUMMY_SP,
next: None,
}
}
fn bump_inner(&mut self) -> Option<Token> {
let prev = self.cur.take();
self.last_pos = match prev {
Some(Item {
span: Span { end, .. },
..
}) => end,
_ => self.last_pos,
self.last_span = match prev {
Some(TokenAndSpan { span, .. }) => span,
_ => self.last_span,
};
// If we have peeked a token, take it instead of calling lexer.next()
@ -136,18 +107,13 @@ impl<I: Input> ParserInput<I> {
}
pub fn eat(&mut self, expected: &Token) -> bool {
match self.cur() {
Some(t) => {
if *expected == *t {
self.bump();
true
} else {
false
}
}
_ => false,
let v = self.is(expected);
if v {
self.bump();
}
v
}
pub fn eat_keyword(&mut self, kwd: Keyword) -> bool {
self.eat(&Word(Keyword(kwd)))
}
@ -155,11 +121,24 @@ impl<I: Input> ParserInput<I> {
pub fn cur_pos(&self) -> BytePos {
self.cur
.as_ref()
.map(|item| item.span.start)
.unwrap_or(BytePos(0))
.map(|item| item.span.lo())
.unwrap_or_else(|| self.last_pos())
}
/// Returns last of previous token.
pub const fn last_pos(&self) -> BytePos {
self.last_pos
pub fn cur_span(&self) -> Span {
self.cur
.as_ref()
.map(|item| item.span)
.unwrap_or(self.last_span)
}
/// Returns last byte position of previous token.
pub fn last_pos(&self) -> BytePos {
self.last_span.hi()
}
/// Returns span of the previous token.
pub const fn last_span(&self) -> Span {
self.last_span
}
}

View File

@ -1,20 +1,11 @@
macro_rules! unexpected {
($p:expr) => {{
let pos = cur_pos!($p);
let cur = cur!($p);
let cur = cur!($p)?;
unimplemented!("unexpected token: {:?} at {:?}", cur, pos);
}};
}
macro_rules! syntax_error {
($p:expr, $s:expr) => {{
let err = Error::Syntax($p.input.cur().cloned(), cur_pos!($p), $s, file!(), line!());
error!($p.logger, "failed to parse: {:?}", err);
let res: PResult<!> = Err(err);
res?
}};
}
/// This handles automatic semicolon insertion.
///
/// Returns bool.
@ -22,7 +13,7 @@ macro_rules! is {
($p:expr, BindingIdent) => {{
match cur!($p) {
// TODO: Exclude some keywords
Some(&Word(ref w)) => !w.is_reserved_word($p.ctx.strict),
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
_ => false,
}
}};
@ -30,20 +21,20 @@ macro_rules! is {
($p:expr, IdentRef) => {{
match cur!($p) {
// TODO: Exclude some keywords
Some(&Word(ref w)) => !w.is_reserved_word($p.ctx.strict),
Ok(&Word(ref w)) => !w.is_reserved_word($p.session.cfg.strict),
_ => false,
}
}};
($p:expr, IdentName) => {{
match cur!($p) {
Some(&Word(..)) => true,
Ok(&Word(..)) => true,
_ => false,
}
}};
($p:expr, ';') => {{
$p.input.is(&Token::Semi) || cur!($p) == None || is!($p, '}')
$p.input.is(&Token::Semi) || eof!($p) || is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
@ -55,9 +46,7 @@ macro_rules! is {
/// Returns true on eof.
macro_rules! eof {
($p:expr) => {
cur!($p) == None
};
($p:expr) => { cur!($p).is_err() };
}
macro_rules! peeked_is {
@ -92,8 +81,8 @@ macro_rules! assert_and_bump {
/// if token has data like string.
macro_rules! eat {
($p:expr, ';') => {{
debug!($p.logger, "eat(';'): cur={:?}", cur!($p));
$p.input.eat(&Token::Semi) || cur!($p) == None || is!($p, '}')
debug!($p.session.logger, "eat(';'): cur={:?}", cur!($p));
$p.input.eat(&Token::Semi) || eof!($p) || is!($p, '}')
|| $p.input.had_line_break_before_cur()
}};
@ -141,9 +130,17 @@ macro_rules! expect_exact {
}
macro_rules! cur {
($parser:expr) => {
$parser.input.cur()
};
($p:expr) => {{
let pos = $p.input.last_pos();
let last = Span::new(pos, pos, Default::default());
match $p.input.cur() {
Some(c) => Ok(c),
None => Err($crate::error::Eof {
last,
handler: &$p.session.handler,
}),
}
}};
}
macro_rules! peek {
@ -151,9 +148,20 @@ macro_rules! peek {
assert!(
$p.input.knows_cur(),
"parser should not call peek() without knowing current token.
Current token is {:?}", cur!($p)
Current token is {:?}",
cur!($p),
);
$p.input.peek()
let pos = cur_pos!($p);
let last = Span::new(pos, pos, Default::default());
match $p.input.peek() {
Some(c) => Ok(c),
None => Err($crate::error::Eof {
//TODO: Use whole span
last,
handler: &$p.session.handler,
}),
}
}};
}
@ -168,17 +176,20 @@ macro_rules! bump {
}
macro_rules! cur_pos {
($p:expr) => { $p.input.cur_pos() }
($p:expr) => {{
let pos = $p.input.cur_pos();
pos
}}
}
macro_rules! last_pos {
($p:expr) => { $p.input.last_pos()};
($p:expr) => { $p.input.last_span().hi() };
}
macro_rules! return_if_arrow {
($p:expr, $expr:expr) => {{
let is_cur = match $p.state.potential_arrow_start {
Some(start) => $expr.span.start == start,
Some(start) => $expr.span.lo() == start,
None => false
};
if is_cur {

View File

@ -1,16 +1,17 @@
#![allow(dead_code, unused_variables)]
#![deny(non_snake_case)]
pub use self::input::Input;
use self::input::ParserInput;
use self::util::ParseObject;
use Session;
use ast::*;
use error::SyntaxError;
use lexer::Input;
use lexer::Lexer;
use parser_macros::parser;
use slog::Logger;
use std::ops::{Deref, DerefMut};
use std::option::NoneError;
use swc_atoms::JsWord;
use swc_common::{BytePos, Span};
use swc_common::errors::Diagnostic;
use token::*;
#[macro_use]
@ -24,34 +25,17 @@ mod pat;
pub mod input;
mod util;
pub type PResult<T> = Result<T, Error>;
#[derive(Debug)]
pub enum Error {
Eof,
Syntax(Option<Token>, BytePos, SyntaxError, &'static str, u32),
}
impl From<NoneError> for Error {
fn from(_: NoneError) -> Self {
Error::Eof
}
}
#[derive(Debug, Clone, Copy, Default)]
pub struct Config {}
pub type PResult<'a, T> = Result<T, Diagnostic<'a>>;
/// EcmaScript parser.
pub struct Parser<I: Input> {
logger: Logger,
cfg: Config,
pub struct Parser<'a, I: Input> {
session: Session<'a>,
ctx: Context,
state: State,
input: ParserInput<I>,
input: ParserInput<'a, I>,
}
#[derive(Debug, Clone, Copy, Default)]
struct Context {
strict: bool,
include_in_expr: bool,
/// If true, await expression is parsed, and "await" is treated as a
/// keyword.
@ -59,7 +43,6 @@ struct Context {
/// If true, yield expression is parsed, and "yield" is treated as a
/// keyword.
in_generator: bool,
in_module: bool,
}
#[derive(Debug, Default)]
@ -69,42 +52,40 @@ struct State {
potential_arrow_start: Option<BytePos>,
}
impl<I: Input> Parser<I> {
pub fn new_for_module(logger: Logger, lexer: I) -> Self {
impl<'a, I: Input> Parser<'a, I> {
pub fn new(session: Session<'a>, input: I) -> Self {
Parser {
logger,
input: ParserInput::new(lexer),
ctx: Context {
strict: true,
in_module: true,
..Default::default()
},
cfg: Default::default(),
state: Default::default(),
}
}
pub fn new_for_script(logger: Logger, lexer: I, strict: bool) -> Self {
Parser {
logger,
input: ParserInput::new(lexer),
ctx: Context {
strict,
..Default::default()
},
cfg: Default::default(),
session,
input: ParserInput::new(Lexer::new(session, input)),
ctx: Default::default(),
state: Default::default(),
}
}
#[parser]
pub fn parse_script(&mut self) -> PResult<Vec<Stmt>> {
pub fn parse_script(&mut self) -> PResult<'a, Vec<Stmt>> {
self.session.cfg.module = false;
self.parse_block_body(true, None)
}
#[parser]
pub fn parse_module(&mut self) -> PResult<Module> {
pub fn parse_module(&mut self) -> PResult<'a, Module> {
//TOOD: parse() -> PResult<'a, Program>
self.session.cfg.module = true;
self.session.cfg.strict = true;
self.parse_block_body(true, None)
.map(|body| Module { body })
}
}
#[cfg(test)]
fn test_parser<F, Ret>(s: &'static str, f: F) -> Ret
where
F: FnOnce(&mut Parser<::CharIndices>) -> Ret,
{
::with_test_sess(s, |session| {
f(&mut Parser::new(session, ::CharIndices(s.char_indices())))
})
}

View File

@ -3,11 +3,11 @@
use super::*;
#[parser]
impl<I: Input> Parser<I> {
impl<'a, I: Input> Parser<'a, I> {
/// Parse a object literal or object pattern.
pub(super) fn parse_object<T>(&mut self) -> PResult<T>
pub(super) fn parse_object<T>(&mut self) -> PResult<'a, T>
where
Self: ParseObject<T>,
Self: ParseObject<'a, T>,
{
let start = cur_pos!();
assert_and_bump!('{');
@ -34,7 +34,7 @@ impl<I: Input> Parser<I> {
}
/// spec: 'PropertyName'
pub(super) fn parse_prop_name(&mut self) -> PResult<PropName> {
pub(super) fn parse_prop_name(&mut self) -> PResult<'a, PropName> {
let start = cur_pos!();
let v = match *cur!()? {
@ -69,7 +69,7 @@ impl<I: Input> Parser<I> {
}
#[parser]
impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
impl<'a, I: Input> ParseObject<'a, Box<Expr>> for Parser<'a, I> {
type Prop = Prop;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Box<Expr> {
@ -80,7 +80,7 @@ impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
}
/// spec: 'PropertyDefinition'
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
let start = cur_pos!();
// Parse as 'MethodDefinition'
@ -105,10 +105,7 @@ impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
if eat!(':') {
let value = self.include_in_expr(true).parse_assignment_expr()?;
return Ok(Prop {
span: Span {
start,
end: value.span.end,
},
span: Span::new(start, value.span.hi(), Default::default()),
node: PropKind::KeyValue { key, value },
});
}
@ -133,7 +130,7 @@ impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
let is_reserved_word = {
// FIXME: Use extension trait instead of this.
let word = Word::from(ident.sym);
let r = word.is_reserved_word(self.ctx.strict);
let r = word.is_reserved_word(self.session.cfg.strict);
ident = Ident {
sym: word.into(),
..ident
@ -141,7 +138,7 @@ impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
r
};
if is_reserved_word {
syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat)
syntax_error!(SyntaxError::ReservedWordInObjShorthandOrPat);
}
if eat!('=') {
@ -202,7 +199,7 @@ impl<I: Input> ParseObject<Box<Expr>> for Parser<I> {
}
#[parser]
impl<I: Input> ParseObject<Pat> for Parser<I> {
impl<'a, I: Input> ParseObject<'a, Pat> for Parser<'a, I> {
type Prop = ObjectPatProp;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Pat {
@ -213,7 +210,7 @@ impl<I: Input> ParseObject<Pat> for Parser<I> {
}
/// Production 'BindingProperty'
fn parse_object_prop(&mut self) -> PResult<Self::Prop> {
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop> {
let key = self.parse_prop_name()?;
if eat!(':') {
let value = box self.parse_binding_element()?;

View File

@ -3,8 +3,8 @@ use super::*;
use std::iter;
#[parser]
impl<I: Input> Parser<I> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<Option<Ident>> {
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_opt_binding_ident(&mut self) -> PResult<'a, Option<Ident>> {
if is!(BindingIdent) {
self.parse_binding_ident().map(Some)
} else {
@ -15,19 +15,19 @@ impl<I: Input> Parser<I> {
/// babel: `parseBindingIdentifier`
///
/// spec: `BindingIdentifier`
pub(super) fn parse_binding_ident(&mut self) -> PResult<Ident> {
pub(super) fn parse_binding_ident(&mut self) -> PResult<'a, Ident> {
// "yield" and "await" is **lexically** accepted.
let ident = self.parse_ident(true, true)?;
if self.ctx.strict {
if self.session.cfg.strict {
if &*ident.sym == "arguments" || &*ident.sym == "eval" {
syntax_error!(SyntaxError::EvalAndArgumentsInStrict)
syntax_error!(SyntaxError::EvalAndArgumentsInStrict);
}
}
Ok(ident)
}
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<Pat> {
pub(super) fn parse_binding_pat_or_ident(&mut self) -> PResult<'a, Pat> {
match *cur!()? {
tok!("yield") | Word(..) => self.parse_binding_ident().map(Pat::from),
tok!('[') => self.parse_array_binding_pat(),
@ -43,7 +43,7 @@ impl<I: Input> Parser<I> {
}
/// babel: `parseBindingAtom`
pub(super) fn parse_binding_element(&mut self) -> PResult<Pat> {
pub(super) fn parse_binding_element(&mut self) -> PResult<'a, Pat> {
let start = cur_pos!();
let left = self.parse_binding_pat_or_ident()?;
@ -61,8 +61,8 @@ impl<I: Input> Parser<I> {
Ok(left)
}
fn parse_array_binding_pat(&mut self) -> PResult<Pat> {
spanned!({
fn parse_array_binding_pat(&mut self) -> PResult<'a, Pat> {
self.spanned(|p| {
assert_and_bump!('[');
let mut elems = vec![];
@ -79,7 +79,7 @@ impl<I: Input> Parser<I> {
let start = cur_pos!();
if eat!("...") {
let pat = self.parse_binding_pat_or_ident()?;
let pat = p.parse_binding_pat_or_ident()?;
let pat = Pat {
span: span!(start),
node: PatKind::Rest(box pat),
@ -87,7 +87,7 @@ impl<I: Input> Parser<I> {
elems.push(Some(pat));
break;
} else {
elems.push(self.parse_binding_element().map(Some)?);
elems.push(p.parse_binding_element().map(Some)?);
}
}
@ -98,13 +98,13 @@ impl<I: Input> Parser<I> {
}
/// spec: 'FormalParameter'
pub(super) fn parse_formal_param(&mut self) -> PResult<Pat> {
pub(super) fn parse_formal_param(&mut self) -> PResult<'a, Pat> {
self.parse_binding_element()
}
///
/// spec: 'FormalParameterList'
pub(super) fn parse_formal_params(&mut self) -> PResult<Vec<Pat>> {
pub(super) fn parse_formal_params(&mut self) -> PResult<'a, Vec<Pat>> {
let mut first = true;
let mut params = vec![];
@ -137,17 +137,17 @@ impl<I: Input> Parser<I> {
Ok(params)
}
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<Vec<Pat>> {
pub(super) fn parse_unique_formal_params(&mut self) -> PResult<'a, Vec<Pat>> {
// FIXME: This is wrong.
self.parse_formal_params()
}
}
#[parser]
impl<I: Input> Parser<I> {
impl<'a, I: Input> Parser<'a, I> {
/// This does not return 'rest' pattern because non-last parameter cannot be
/// rest.
pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<Pat> {
pub(super) fn reparse_expr_as_pat(&mut self, box expr: Box<Expr>) -> PResult<'a, Pat> {
let span = expr.span;
match expr.node {
@ -209,7 +209,7 @@ impl<I: Input> Parser<I> {
prop
),
})
.collect::<PResult<_>>()?),
.collect::<PResult<'a, _>>()?),
});
}
ExprKind::Ident(ident) => return Ok(ident.into()),
@ -269,7 +269,7 @@ impl<I: Input> Parser<I> {
pub(super) fn parse_exprs_as_params(
&mut self,
mut exprs: Vec<ExprOrSpread>,
) -> PResult<Vec<Pat>> {
) -> PResult<'a, Vec<Pat>> {
let len = exprs.len();
if len == 0 {
return Ok(vec![]);

View File

@ -4,24 +4,25 @@ use swc_macros::ast_node;
mod module_item;
#[parser]
impl<I: Input> Parser<I> {
impl<'a, I: Input> Parser<'a, I> {
pub(super) fn parse_block_body<Type>(
&mut self,
top_level: bool,
end: Option<&Token>,
) -> PResult<Vec<Type>>
) -> PResult<'a, Vec<Type>>
where
Self: StmtLikeParser<Type>,
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
{
let mut stmts = vec![];
while {
let b = cur!() != end;
let b = cur!().ok() != end;
b
} {
let stmt = self.parse_stmt_like(true, top_level)?;
stmts.push(stmt);
}
if end.is_some() {
bump!();
}
@ -29,18 +30,18 @@ impl<I: Input> Parser<I> {
Ok(stmts)
}
fn parse_stmt(&mut self, top_level: bool) -> PResult<Stmt> {
fn parse_stmt(&mut self, top_level: bool) -> PResult<'a, Stmt> {
self.parse_stmt_internal(false, top_level)
}
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<Stmt> {
fn parse_stmt_list_item(&mut self, top_level: bool) -> PResult<'a, Stmt> {
self.parse_stmt_internal(true, top_level)
}
/// Parse a statement, declaration or module item.
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<Type>
fn parse_stmt_like<Type>(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Type>
where
Self: StmtLikeParser<Type>,
Self: StmtLikeParser<'a, Type>,
Type: From<Stmt>,
{
if <Self as StmtLikeParser<Type>>::accept_import_export() {
@ -52,91 +53,118 @@ impl<I: Input> Parser<I> {
.map(From::from)
}
fn parse_stmt_internal(&mut self, include_decl: bool, top_level: bool) -> PResult<Stmt> {
fn parse_stmt_internal(&mut self, include_decl: bool, top_level: bool) -> PResult<'a, Stmt> {
let start = cur_pos!();
if is_one_of!("break", "continue") {
return self.spanned(|p| {
let is_break = is!("break");
bump!();
let label = if eat!(';') {
None
} else {
let i = p.parse_label_ident().map(Some)?;
expect!(';');
i
};
Ok(if is_break {
StmtKind::Break(BreakStmt { label })
} else {
StmtKind::Continue(ContinueStmt { label })
})
});
}
if is!("debugger") {
return self.spanned(|p| {
bump!();
expect!(';');
Ok(StmtKind::Debugger)
});
}
if is!("do") {
return self.parse_do_stmt();
}
if is!("for") {
return self.parse_for_stmt();
}
if is!("function") {
if !include_decl {
unexpected!()
}
return self.parse_fn_decl().map(Stmt::from);
}
if is!("class") {
if !include_decl {
unexpected!()
}
return self.parse_class_decl().map(Stmt::from);
}
if is!("if") {
return self.parse_if_stmt();
}
if is!("return") {
return self.parse_return_stmt();
}
if is!("switch") {
return self.parse_switch_stmt();
}
if is!("throw") {
return self.parse_throw_stmt();
}
if is!("try") {
return self.parse_try_stmt();
}
if is!("with") {
return self.parse_with_stmt();
}
if is!("while") {
return self.parse_while_stmt();
}
if is!("var") || (include_decl && is!("const")) {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
// 'let' can start an identifier reference.
if include_decl && is!("let") {
let is_keyword = match peek!() {
Ok(t) => t.follows_keyword_let(self.session.cfg.strict),
_ => false,
};
if is_keyword {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
}
match *cur!()? {
Word(Keyword(w)) => match w {
Break | Continue => {
return spanned!({
bump!();
let is_break = w == Break;
let label = if eat!(';') {
None
} else {
let i = self.parse_label_ident().map(Some)?;
expect!(';');
i
};
Ok(if is_break {
StmtKind::Break(BreakStmt { label })
} else {
StmtKind::Continue(ContinueStmt { label })
})
})
}
Debugger => {
return spanned!({
bump!();
expect!(';');
Ok(StmtKind::Debugger)
})
}
Do => return self.parse_do_stmt(),
For => return self.parse_for_stmt(),
Function => {
if !include_decl {
unexpected!()
}
return self.parse_fn_decl().map(Stmt::from);
}
Class if !include_decl => unexpected!(),
Class => return self.parse_class_decl().map(Stmt::from),
If => return self.parse_if_stmt(),
Return => return self.parse_return_stmt(),
Switch => return self.parse_switch_stmt(),
Throw => return self.parse_throw_stmt(),
Try => return self.parse_try_stmt(),
// `let;` is identifier reference.
Let if include_decl => match peek!() {
Some(t) if t.follows_keyword_let(self.ctx.strict) => {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
_ => {}
},
Const | Var => {
if w == Var || include_decl {
let v = self.parse_var_stmt(false)?;
return Ok(Stmt {
span: v.span,
node: StmtKind::Decl(Decl::Var(v)),
});
}
// Handle `let;` by forwarding it to expression statement
}
While => return self.parse_while_stmt(),
With => return self.parse_with_stmt(),
_ => {}
},
LBrace => return spanned!({ self.parse_block().map(StmtKind::Block) }),
LBrace => return self.spanned(|p| p.parse_block().map(StmtKind::Block)),
Semi => {
return spanned!({
return self.spanned(|p| {
bump!();
Ok(StmtKind::Empty)
})
@ -181,24 +209,24 @@ impl<I: Input> Parser<I> {
}.into())
}
fn parse_if_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_if_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("if");
expect!('(');
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
expect!(')');
let cons = {
// Annex B
if !self.ctx.strict && is!("function") {
if !p.session.cfg.strict && is!("function") {
// TODO: report error?
}
box self.parse_stmt(false)?
box p.parse_stmt(false)?
};
let alt = if eat!("else") {
Some(box self.parse_stmt(false)?)
Some(box p.parse_stmt(false)?)
} else {
None
};
@ -207,26 +235,26 @@ impl<I: Input> Parser<I> {
})
}
fn parse_return_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_return_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("return");
let arg = if is!(';') {
None
} else {
self.include_in_expr(true).parse_expr().map(Some)?
p.include_in_expr(true).parse_expr().map(Some)?
};
expect!(';');
Ok(StmtKind::Return(ReturnStmt { arg }))
})
}
fn parse_switch_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_switch_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("switch");
expect!('(');
let discriminant = self.include_in_expr(true).parse_expr()?;
let discriminant = p.include_in_expr(true).parse_expr()?;
expect!(')');
let mut cur = None;
@ -240,10 +268,10 @@ impl<I: Input> Parser<I> {
bump!();
cases.extend(cur.take());
let test = if is_case {
self.include_in_expr(true).parse_expr().map(Some)?
p.include_in_expr(true).parse_expr().map(Some)?
} else {
if has_default {
syntax_error!(SyntaxError::MultipleDefault)
syntax_error!(SyntaxError::MultipleDefault);
}
has_default = true;
None
@ -254,7 +282,7 @@ impl<I: Input> Parser<I> {
} else {
match cur {
Some(ref mut cur) => {
cur.cons.push(self.parse_stmt_list_item(false)?);
cur.cons.push(p.parse_stmt_list_item(false)?);
}
None => unexpected!(),
}
@ -270,30 +298,31 @@ impl<I: Input> Parser<I> {
})
}
fn parse_throw_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_throw_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("throw");
if self.input.had_line_break_before_cur() {
syntax_error!(SyntaxError::LineBreakInThrow)
if p.input.had_line_break_before_cur() {
// TODO: Suggest throw arg;
syntax_error!(SyntaxError::LineBreakInThrow);
}
let arg = self.include_in_expr(true).parse_expr()?;
let arg = p.include_in_expr(true).parse_expr()?;
expect!(';');
Ok(StmtKind::Throw(ThrowStmt { arg }))
})
}
fn parse_try_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_try_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("try");
let block = self.parse_block()?;
let block = p.parse_block()?;
let handler = if eat!("catch") {
let param = self.parse_catch_param()?;
self.parse_block()
let param = p.parse_catch_param()?;
p.parse_block()
.map(|body| CatchClause { param, body })
.map(Some)?
} else {
@ -301,7 +330,7 @@ impl<I: Input> Parser<I> {
};
let finalizer = if eat!("finally") {
self.parse_block().map(Some)?
p.parse_block().map(Some)?
} else {
if handler.is_none() {
unexpected!();
@ -317,14 +346,14 @@ impl<I: Input> Parser<I> {
})
}
fn parse_catch_param(&mut self) -> PResult<Pat> {
fn parse_catch_param(&mut self) -> PResult<'a, Pat> {
expect!('(');
let pat = self.parse_binding_pat_or_ident()?;
expect!(')');
Ok(pat)
}
fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<VarDecl> {
fn parse_var_stmt(&mut self, for_loop: bool) -> PResult<'a, VarDecl> {
let start = cur_pos!();
let kind = match bump!() {
tok!("const") => VarDeclKind::Const,
@ -351,7 +380,7 @@ impl<I: Input> Parser<I> {
})
}
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<VarDeclarator> {
fn parse_var_declarator(&mut self, for_loop: bool) -> PResult<'a, VarDeclarator> {
let start = cur_pos!();
let name = self.parse_binding_pat_or_ident()?;
@ -363,7 +392,7 @@ impl<I: Input> Parser<I> {
// Destructuring bindings require initializers.
match name.node {
PatKind::Ident(..) => None,
_ => syntax_error!(SyntaxError::PatVarWithoutInit { span: span!(start) }),
_ => syntax_error!(SyntaxError::PatVarWithoutInit),
}
}
} else {
@ -378,13 +407,13 @@ impl<I: Input> Parser<I> {
});
}
fn parse_do_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_do_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("do");
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
expect!("while");
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
// We *may* eat semicolon.
let _ = eat!(';');
@ -393,49 +422,49 @@ impl<I: Input> Parser<I> {
})
}
fn parse_while_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_while_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("while");
expect!('(');
let test = self.include_in_expr(true).parse_expr()?;
let test = p.include_in_expr(true).parse_expr()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(StmtKind::While(WhileStmt { test, body }))
})
}
fn parse_with_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_with_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("with");
expect!('(');
let obj = self.include_in_expr(true).parse_expr()?;
let obj = p.include_in_expr(true).parse_expr()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(StmtKind::With(WithStmt { obj, body }))
})
}
pub(super) fn parse_block(&mut self) -> PResult<BlockStmt> {
spanned!({
pub(super) fn parse_block(&mut self) -> PResult<'a, BlockStmt> {
self.spanned(|p| {
expect!('{');
let stmts = self.parse_block_body(false, Some(&RBrace))?;
let stmts = p.parse_block_body(false, Some(&RBrace))?;
Ok(stmts)
})
}
fn parse_labelled_stmt(&mut self, label: Ident) -> PResult<Stmt> {
let start = label.span.start;
fn parse_labelled_stmt(&mut self, label: Ident) -> PResult<'a, Stmt> {
let start = label.span.lo();
for l in &self.state.labels {
if label.sym == *l {
syntax_error!(SyntaxError::DuplicateLabel(label.sym.clone()))
syntax_error!(SyntaxError::DuplicateLabel(label.sym.clone()));
}
}
let body = box if is!("function") {
@ -450,13 +479,13 @@ impl<I: Input> Parser<I> {
})
}
fn parse_for_stmt(&mut self) -> PResult<Stmt> {
spanned!({
fn parse_for_stmt(&mut self) -> PResult<'a, Stmt> {
self.spanned(|p| {
assert_and_bump!("for");
expect!('(');
let head = self.parse_for_head()?;
let head = p.parse_for_head()?;
expect!(')');
let body = box self.parse_stmt(false)?;
let body = box p.parse_stmt(false)?;
Ok(match head {
ForHead::For { init, test, update } => StmtKind::For(ForStmt {
@ -471,11 +500,11 @@ impl<I: Input> Parser<I> {
})
}
fn parse_for_head(&mut self) -> PResult<ForHead> {
fn parse_for_head(&mut self) -> PResult<'a, ForHead> {
let start = cur_pos!();
if is_one_of!("const", "var")
|| (is!("let") && peek!()?.follows_keyword_let(self.ctx.strict))
|| (is!("let") && peek!()?.follows_keyword_let(self.session.cfg.strict))
{
let decl = self.parse_var_stmt(true)?;
@ -504,7 +533,7 @@ impl<I: Input> Parser<I> {
self.parse_normal_for_head(Some(VarDeclOrExpr::Expr(init)))
}
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<ForHead> {
fn parse_for_each_head(&mut self, left: VarDeclOrPat) -> PResult<'a, ForHead> {
let of = bump!() == tok!("of");
if of {
let right = self.include_in_expr(true).parse_assignment_expr()?;
@ -515,7 +544,7 @@ impl<I: Input> Parser<I> {
}
}
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<ForHead> {
fn parse_normal_for_head(&mut self, init: Option<VarDeclOrExpr>) -> PResult<'a, ForHead> {
let test = if eat_exact!(';') {
None
} else {
@ -551,16 +580,16 @@ enum ForHead {
},
}
pub(super) trait StmtLikeParser<Type> {
pub(super) trait StmtLikeParser<'a, Type> {
fn accept_import_export() -> bool;
fn handle_import_export(&mut self, top_level: bool) -> PResult<Type>;
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, Type>;
}
impl<I: Input> StmtLikeParser<Stmt> for Parser<I> {
impl<'a, I: Input> StmtLikeParser<'a, Stmt> for Parser<'a, I> {
fn accept_import_export() -> bool {
false
}
fn handle_import_export(&mut self, top_level: bool) -> PResult<Stmt> {
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, Stmt> {
unreachable!()
}
}
@ -568,22 +597,27 @@ impl<I: Input> StmtLikeParser<Stmt> for Parser<I> {
#[cfg(test)]
mod tests {
use super::*;
use lexer::Lexer;
fn mk<'a>(s: &'static str) -> Parser<impl 'a + Input> {
let logger = ::testing::logger().new(o!("src" => s));
Parser::new_for_module(logger.clone(), Lexer::new_from_str(logger, s))
}
use swc_common::DUMMY_SP;
fn stmt(s: &'static str) -> Stmt {
mk(s).parse_stmt(true).expect("failed to parse a statement")
test_parser(s, |p| {
p.parse_stmt(true).unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse a statement")
})
})
}
fn expr(s: &'static str) -> Box<Expr> {
mk(s).parse_expr().expect("failed to parse an expression")
test_parser(s, |p| {
p.parse_expr().unwrap_or_else(|err| {
err.emit();
unreachable!("failed to parse an expression")
})
})
}
#[allow(non_upper_case_globals)]
const span: Span = Span::DUMMY;
const span: Span = DUMMY_SP;
#[test]
fn expr_stmt() {

View File

@ -1,8 +1,8 @@
use super::*;
#[parser]
impl<I: Input> Parser<I> {
fn parse_import(&mut self) -> PResult<ModuleDecl> {
impl<'a, I: Input> Parser<'a, I> {
fn parse_import(&mut self) -> PResult<'a, ModuleDecl> {
let start = cur_pos!();
assert_and_bump!("import");
@ -77,7 +77,7 @@ impl<I: Input> Parser<I> {
}
/// Parse `foo`, `foo2 as bar` in `import { foo, foo2 as bar }`
fn parse_import_specifier(&mut self) -> PResult<ImportSpecifier> {
fn parse_import_specifier(&mut self) -> PResult<'a, ImportSpecifier> {
let start = cur_pos!();
match *cur!()? {
Word(..) => {
@ -86,10 +86,7 @@ impl<I: Input> Parser<I> {
if eat!("as") {
let local = self.parse_binding_ident()?;
return Ok(ImportSpecifier {
span: Span {
start,
end: local.span.end,
},
span: Span::new(start, local.span.hi(), Default::default()),
local,
node: ImportSpecifierKind::Specific {
imported: Some(orig_name),
@ -109,19 +106,20 @@ impl<I: Input> Parser<I> {
}
}
fn parse_imported_default_binding(&mut self) -> PResult<Ident> {
fn parse_imported_default_binding(&mut self) -> PResult<'a, Ident> {
self.parse_imported_binding()
}
fn parse_imported_binding(&mut self) -> PResult<Ident> {
self.with_ctx(Context {
fn parse_imported_binding(&mut self) -> PResult<'a, Ident> {
let ctx = Context {
in_async: false,
in_generator: false,
..self.ctx
}).parse_binding_ident()
};
self.with_ctx(ctx).parse_binding_ident()
}
fn parse_export(&mut self) -> PResult<ModuleDecl> {
fn parse_export(&mut self) -> PResult<'a, ModuleDecl> {
let start = cur_pos!();
assert_and_bump!("export");
@ -214,7 +212,7 @@ impl<I: Input> Parser<I> {
});
}
fn parse_export_specifier(&mut self) -> PResult<ExportSpecifier> {
fn parse_export_specifier(&mut self) -> PResult<'a, ExportSpecifier> {
let orig = self.parse_ident_name()?;
let exported = if eat!("as") {
@ -225,7 +223,7 @@ impl<I: Input> Parser<I> {
Ok(ExportSpecifier { orig, exported })
}
fn parse_from_clause_and_semi(&mut self) -> PResult<String> {
fn parse_from_clause_and_semi(&mut self) -> PResult<'a, String> {
expect!("from");
match *cur!()? {
Str(..) => match bump!() {
@ -241,12 +239,12 @@ impl<I: Input> Parser<I> {
}
#[parser]
impl<I: Input> StmtLikeParser<ModuleItem> for Parser<I> {
impl<'a, I: Input> StmtLikeParser<'a, ModuleItem> for Parser<'a, I> {
fn accept_import_export() -> bool {
true
}
fn handle_import_export(&mut self, top_level: bool) -> PResult<ModuleItem> {
fn handle_import_export(&mut self, top_level: bool) -> PResult<'a, ModuleItem> {
if !top_level {
syntax_error!(SyntaxError::NonTopLevelImportExport);
}

View File

@ -1,8 +1,10 @@
use super::*;
use swc_common::Spanned;
impl<I: Input> Parser<I> {
#[parser]
impl<'a, I: Input> Parser<'a, I> {
/// Original context is restored when returned guard is dropped.
pub(super) fn with_ctx(&mut self, ctx: Context) -> WithCtx<I> {
pub(super) fn with_ctx<'w>(&'w mut self, ctx: Context) -> WithCtx<'w, 'a, I> {
let orig_ctx = self.ctx;
self.ctx = ctx;
WithCtx {
@ -12,11 +14,12 @@ impl<I: Input> Parser<I> {
}
/// Original context is restored when returned guard is dropped.
pub(super) fn include_in_expr(&mut self, include_in_expr: bool) -> WithCtx<I> {
self.with_ctx(Context {
pub(super) fn include_in_expr<'w>(&'w mut self, include_in_expr: bool) -> WithCtx<'w, 'a, I> {
let ctx = Context {
include_in_expr,
..self.ctx
})
};
self.with_ctx(ctx)
}
/// Parse with given closure
@ -26,30 +29,42 @@ impl<I: Input> Parser<I> {
{
f(self)
}
pub(super) fn spanned<F, Node, Ret>(&mut self, f: F) -> PResult<'a, Node>
where
F: FnOnce(&mut Self) -> PResult<'a, Ret>,
Node: Spanned<Ret>,
{
let start = self.input.cur_pos();
let val = f(self)?;
let span = span!(start);
Ok(Spanned::from_unspanned(val, span))
}
}
pub trait ParseObject<Obj> {
pub trait ParseObject<'a, Obj> {
type Prop;
fn make_object(span: Span, props: Vec<Self::Prop>) -> Obj;
fn parse_object_prop(&mut self) -> PResult<Self::Prop>;
fn parse_object_prop(&mut self) -> PResult<'a, Self::Prop>;
}
pub struct WithCtx<'a, I: 'a + Input> {
inner: &'a mut Parser<I>,
pub struct WithCtx<'w, 'a: 'w, I: 'w + Input> {
inner: &'w mut Parser<'a, I>,
orig_ctx: Context,
}
impl<'a, I: Input> Deref for WithCtx<'a, I> {
type Target = Parser<I>;
fn deref(&self) -> &Parser<I> {
impl<'w, 'a, I: Input> Deref for WithCtx<'w, 'a, I> {
type Target = Parser<'a, I>;
fn deref(&self) -> &Parser<'a, I> {
&self.inner
}
}
impl<'a, I: Input> DerefMut for WithCtx<'a, I> {
fn deref_mut(&mut self) -> &mut Parser<I> {
impl<'w, 'a, I: Input> DerefMut for WithCtx<'w, 'a, I> {
fn deref_mut(&mut self) -> &mut Parser<'a, I> {
&mut self.inner
}
}
impl<'a, I: Input> Drop for WithCtx<'a, I> {
impl<'w, 'a, I: Input> Drop for WithCtx<'w, 'a, I> {
fn drop(&mut self) {
self.inner.ctx = self.orig_ctx;
}

View File

@ -109,6 +109,8 @@ pub enum Token {
/// TODO: Make Num as enum and separate decimal, binary, ..etc
#[kind(starts_expr)]
Num(Number),
Error,
}
#[derive(Kind, Debug, Clone, Copy, Eq, PartialEq, Hash)]
@ -181,6 +183,8 @@ impl BinOpToken {
#[derive(Debug, Clone, PartialEq)]
pub struct TokenAndSpan {
pub token: Token,
/// Had a line break before this token?
pub had_line_break: bool,
pub span: Span,
}

View File

@ -18,9 +18,10 @@ use std::panic::{catch_unwind, resume_unwind};
use std::path::Path;
use swc_common::{FoldWith, Folder};
use swc_common::Span;
use swc_common::errors::Handler;
use swc_ecma_parser::{CharIndices, PResult, Parser, Session};
use swc_ecma_parser::ast::*;
use swc_ecma_parser::lexer::Lexer;
use swc_ecma_parser::parser::{PResult, Parser};
use test::{test_main, Options, TestDesc, TestDescAndFn, TestFn, TestName};
use test::ShouldPanic::No;
@ -137,19 +138,23 @@ fn unit_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> {
);
let res = catch_unwind(move || {
let mut sess = TestSess::new();
if module {
let p = |ty, s| {
parse_module(&file_name, s).unwrap_or_else(|err| {
panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
let mut p = |ty, s| {
sess.parse_module(&file_name, s).unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
let expected = p("explicit ", &explicit);
assert_eq!(src, expected);
} else {
let p = |ty, s| {
parse_script(&file_name, s).unwrap_or_else(|err| {
panic!("failed to parse {}: {:?}\ncode:\n{}", ty, err, s)
let mut p = |ty, s| {
sess.parse_script(&file_name, s).unwrap_or_else(|err| {
err.emit();
panic!("failed to parse {} code:\n{}", ty, s)
})
};
let src = p("", &input);
@ -173,17 +178,45 @@ fn logger(file_name: &str, src: &str) -> Logger {
::testing::logger().new(o!("file name" => f, "src" => s,))
}
fn parse_script(file_name: &str, s: &str) -> PResult<Vec<Stmt>> {
let l = logger(file_name, s);
Parser::new_for_script(l.clone(), Lexer::new_from_str(l, s), false)
.parse_script()
.map(normalize)
struct TestSess {
handler: Handler,
logger: Logger,
}
fn parse_module(file_name: &str, s: &str) -> PResult<Module> {
let l = logger(file_name, s);
Parser::new_for_module(l.clone(), Lexer::new_from_str(l, s))
.parse_module()
.map(normalize)
impl TestSess {
fn new() -> Self {
let handler = ::swc_common::errors::Handler::with_tty_emitter(
::swc_common::errors::ColorConfig::Never,
true,
false,
None,
);
TestSess {
handler,
logger: ::testing::logger(),
}
}
fn parse_script<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Vec<Stmt>> {
self.with_parser(file_name, s, |p| p.parse_script().map(normalize))
}
fn parse_module<'a>(&'a mut self, file_name: &str, s: &str) -> PResult<'a, Module> {
self.with_parser(file_name, s, |p| p.parse_module().map(normalize))
}
fn with_parser<'a, F, Ret>(&'a mut self, file_name: &str, src: &str, f: F) -> PResult<'a, Ret>
where
F: FnOnce(&mut Parser<'a, CharIndices>) -> PResult<'a, Ret>,
{
self.logger = logger(file_name, src);
f(&mut Parser::new(
Session {
logger: &self.logger,
handler: &self.handler,
cfg: Default::default(),
},
::CharIndices(src.char_indices()),
))
}
}
fn normalize<T>(mut t: T) -> T
@ -206,7 +239,7 @@ struct Normalizer {
}
impl Folder<Span> for Normalizer {
fn fold(&mut self, _: Span) -> Span {
Span::DUMMY
Span::default()
}
}
impl Folder<ExprKind> for Normalizer {

View File

@ -3,10 +3,12 @@ use syn::fold::{self, Fold};
use syn::synom::Synom;
pub fn expand(_attr: TokenStream, item: Item) -> Item {
MyFolder { parser: None }.fold_item(item)
let item = InjectSelf { parser: None }.fold_item(item);
item
}
struct MyFolder {
struct InjectSelf {
parser: Option<Ident>,
}
@ -36,15 +38,39 @@ where
.0
}
impl Fold for MyFolder {
impl Fold for InjectSelf {
fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall {
/// Extract `p` from `self.parse_with(|p|{})`
fn get_parser_arg(call: &ExprMethodCall) -> Ident {
assert_eq!(call.args.len(), 1);
let expr = call.args.iter().next().unwrap();
let inputs = match expr {
&Expr::Closure(ref c) => &c.inputs,
_ => unreachable!("Parser.parse_with and Parser.spanned accepts a closure"),
};
assert_eq!(inputs.len(), 1);
let p = inputs.clone().into_iter().next().unwrap();
match p {
FnArg::Inferred(Pat::Ident(PatIdent { ident, .. })) => ident,
_ => unreachable!("Expected (|p| {..})"),
}
}
match i.method.as_ref() {
"parse_with" => {
"parse_with" | "spanned" => {
//TODO
return fold::fold_expr_method_call(&mut MyFolder { parser: None }, i);
let parser = get_parser_arg(&i);
return fold::fold_expr_method_call(
&mut InjectSelf {
parser: Some(parser),
},
i,
);
}
_ => {}
}
};
fold::fold_expr_method_call(self, i)
}
@ -68,16 +94,12 @@ impl Fold for MyFolder {
i
}
fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure {
if self.parser.is_none() {
// if we don't know what closure is this, don't do anything.
i
} else {
fold::fold_expr_closure(self, i)
}
}
fn fold_macro(&mut self, i: Macro) -> Macro {
let parser = match self.parser {
Some(s) => s,
_ => return i,
};
let name = i.path.dump().to_string();
let span = get_joinned_span(&i.path);
@ -104,7 +126,7 @@ impl Fold for MyFolder {
parse(i.tts.into()).expect("failed to parse input to spanned as a block");
let block = self.fold_block(block);
return Macro {
tts: TokenStream::from(quote_spanned!(span => self,))
tts: TokenStream::from(quote_spanned!(span => #parser, ))
.into_iter()
.chain(TokenStream::from(block.dump()))
.collect(),
@ -118,7 +140,7 @@ impl Fold for MyFolder {
| "peek" | "peek_ahead" | "last_pos" | "return_if_arrow" | "span" | "syntax_error"
| "unexpected" => {
let tts = if i.tts.is_empty() {
quote_spanned!(span => self).into()
quote_spanned!(span => #parser).into()
} else {
let mut args: Punctuated<Expr, token::Comma> = parse_args(i.tts.into());
let args = args.into_pairs()
@ -126,7 +148,7 @@ impl Fold for MyFolder {
.map(|arg| arg.dump())
.flat_map(|t| TokenStream::from(t));
TokenStream::from(quote_spanned!(span => self,))
TokenStream::from(quote_spanned!(span => #parser,))
.into_iter()
.chain(args)
.collect()

3
rls.toml Normal file
View File

@ -0,0 +1,3 @@
# RLS (Rust Language Server) configuration.
# Analyze the whole cargo workspace rather than a single crate.
workspace_mode = true
# Enable RLS features that are still behind the unstable flag.
unstable_features = true
# Also build library targets so lib-only crates get diagnostics.
build_lib = true

View File

@ -1,4 +0,0 @@
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecmascript;
pub extern crate swc_macros;

109
src/main.rs Normal file
View File

@ -0,0 +1,109 @@
#[macro_use]
extern crate clap;
extern crate rayon;
#[macro_use]
extern crate slog;
extern crate slog_envlogger;
extern crate slog_term;
pub extern crate swc;
pub extern crate swc_common;
use clap::{AppSettings, Arg, SubCommand};
use slog::{Drain, Logger};
use std::error::Error;
use std::io::{self, Write};
use std::path::Path;
use std::rc::Rc;
use swc::Compiler;
use swc_common::errors::{CodeMap, FilePathMapping, Handler};
/// Binary entry point; any error from `run` aborts via `unwrap`, printing
/// the error's `Debug` representation.
fn main() {
run().unwrap()
}
fn run() -> Result<(), Box<Error>> {
let matches = app_from_crate!()
.global_settings(&[AppSettings::StrictUtf8, AppSettings::GlobalVersion])
.settings(&[AppSettings::SubcommandRequiredElseHelp])
.arg(
Arg::with_name("worker")
.short("w")
.long("worker")
.help("Number of threads to use for cpu-intensive tasks")
.takes_value(true)
.value_name("N"),
)
.subcommand(
SubCommand::with_name("js")
.arg(
Arg::with_name("passes")
.short("p")
.long("passes")
.takes_value(true)
.multiple(true),
)
.arg(
Arg::with_name("input file")
.required(true)
.takes_value(true),
),
)
.get_matches();
let thread_pool = rayon::Configuration::new()
.thread_name(|i| format!("swc-worker-{}", i))
.num_threads(
matches
.value_of("worker")
.map(|v| v.parse().expect("expected number for --worker"))
.unwrap_or(0),
)
.build()
.expect("failed to create rayon::ThreadPool?");
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let handler = Handler::with_tty_emitter(
::swc_common::errors::ColorConfig::Always,
true,
false,
Some(cm.clone()),
);
let comp = Compiler::new(logger(), cm.clone(), handler, thread_pool);
if let Some(ref matches) = matches.subcommand_matches("js") {
let input = matches.value_of("input file").unwrap();
let res = comp.parse_js(Path::new(input));
match res {
Ok(module) => println!("Module {:?}", module),
Err(err) => {
err.emit();
panic!("Failed to parse module");
}
}
}
Ok(())
}
/// Builds the root logger: colored terminal output on stderr, env-based
/// filtering via slog-envlogger, serialized behind a mutex, and with the
/// timestamp column suppressed.
fn logger() -> Logger {
    /// Custom timestamp writer that writes nothing, removing timestamps
    /// from every log line.
    fn no_timestamp(_: &mut Write) -> io::Result<()> {
        Ok(())
    }

    let decorator = slog_term::TermDecorator::new()
        .force_color()
        .stderr()
        .build();
    let format = slog_term::FullFormat::new(decorator)
        .use_custom_timestamp(no_timestamp)
        .build();
    // Apply environment-driven filtering, then make the drain shareable.
    let filtered = slog_envlogger::new(format);
    Logger::root(std::sync::Mutex::new(filtered).fuse(), o!())
}

15
swc/Cargo.toml Normal file
View File

@ -0,0 +1,15 @@
# Core compiler crate. The package is named `libswc` but the library target
# below is renamed so downstream code links it as `extern crate swc`.
[package]
name = "libswc"
version = "0.1.0"
authors = ["강동윤 <kdy1@outlook.kr>"]
[lib]
name = "swc"
# Sibling workspace crates plus rayon (worker pool) and slog (logging).
[dependencies]
swc_atoms = { path = "../atoms" }
swc_ecmascript = { path = "../ecmascript" }
swc_common = { path = "../common" }
swc_macros = { path = "../macros" }
rayon = "0.9"
slog = "2"

52
swc/src/lib.rs Normal file
View File

@ -0,0 +1,52 @@
extern crate rayon;
extern crate slog;
pub extern crate swc_atoms;
pub extern crate swc_common;
pub extern crate swc_ecmascript;
pub extern crate swc_macros;
use slog::Logger;
use std::path::Path;
use std::rc::Rc;
use swc_common::errors::{CodeMap, FilePathMapping, Handler};
use swc_ecmascript::ast::Module;
use swc_ecmascript::parser::{CharIndices, PResult, Parser, Session as ParseSess};
/// Shared compiler state: the source map, diagnostics handler, logger, and
/// a worker pool for cpu-intensive tasks.
pub struct Compiler {
// Source map used to load files (see `parse_js`) and resolve spans.
codemap: Rc<CodeMap>,
// Rayon worker pool; stored here but not yet used in this file — TODO confirm intended use.
threads: rayon::ThreadPool,
// Structured logger handed to every parser session.
logger: Logger,
// rustc-style diagnostic handler shared with parser sessions.
handler: Handler,
}
impl Compiler {
pub fn new(
logger: Logger,
codemap: Rc<CodeMap>,
handler: Handler,
threads: rayon::ThreadPool,
) -> Self {
Compiler {
codemap,
threads,
logger,
handler,
}
}
/// TODO
pub fn parse_js(&self, path: &Path) -> PResult<Module> {
let file = self.codemap
.load_file_and_lines(path)
.expect("failed to load file");
let src = file.src.clone().expect("we loaded this right before");
Parser::new(
ParseSess {
handler: &self.handler,
logger: &self.logger,
cfg: Default::default(),
},
CharIndices(src.char_indices()),
).parse_module()
}
}