perf(es/parser): Improve performance (#8224)

**Description:**

Small things
This commit is contained in:
Donny/강동윤 2023-11-06 14:28:30 +09:00 committed by GitHub
parent 1f3726dc64
commit e3e439dba6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 47 additions and 46 deletions

1
Cargo.lock generated
View File

@@ -4530,6 +4530,7 @@ version = "0.141.10"
dependencies = [
"criterion",
"either",
"new_debug_unreachable",
"num-bigint",
"num-traits",
"phf 0.11.2",

View File

@@ -136,6 +136,7 @@ impl<'a> Input for StringInput<'a> {
ret
}
#[inline]
fn uncons_while<F>(&mut self, mut pred: F) -> &str
where
F: FnMut(char) -> bool,

View File

@@ -38,6 +38,7 @@ swc_common = { version = "0.33.3", path = "../swc_common" }
swc_ecma_ast = { version = "0.110.4", path = "../swc_ecma_ast" }
swc_ecma_visit = { version = "0.96.4", path = "../swc_ecma_visit", optional = true }
phf = { version = "0.11.2", features = ["macros"] }
new_debug_unreachable = "1.0.4"
[target.'cfg(not(any(target_arch = "wasm32", target_arch = "arm")))'.dependencies]
stacker = { version = "0.1.15", optional = true }

View File

@@ -188,6 +188,7 @@ impl<'a> Lexer<'a> {
/// Skip comments or whitespaces.
///
/// See https://tc39.github.io/ecma262/#sec-white-space
#[inline(never)]
pub(super) fn skip_space<const LEX_COMMENTS: bool>(&mut self) -> LexResult<()> {
loop {
let (offset, newline) = {
@@ -310,29 +311,7 @@ impl<'a> Lexer<'a> {
is_for_next = false;
}
if let Some(comments) = self.comments_buffer.as_mut() {
let src = unsafe {
// Safety: We got slice_start and end from self.input so those are valid.
self.input.slice(slice_start, end)
};
let s = &src[..src.len() - 2];
let cmt = Comment {
kind: CommentKind::Block,
span: Span::new(start, end, SyntaxContext::empty()),
text: s.into(),
};
let _ = self.input.peek();
if is_for_next {
comments.push_pending_leading(cmt);
} else {
comments.push(BufferedComment {
kind: BufferedCommentKind::Trailing,
pos: self.state.prev_hi,
comment: cmt,
});
}
}
self.store_comment(is_for_next, start, end, slice_start);
return Ok(());
}
@@ -346,6 +325,39 @@ impl<'a> Lexer<'a> {
self.error(start, SyntaxError::UnterminatedBlockComment)?
}
/// Buffers a just-lexed block comment (`/* ... */`) so it can later be
/// attached to the surrounding tokens.
///
/// `start`..`end` spans the comment in the input; `slice_start` is the point
/// from which the raw text is sliced (presumably just past the opening `/*` —
/// TODO confirm against the caller in `skip_space`). The trailing two bytes of
/// the slice (the `*/` delimiter) are trimmed off before storing.
///
/// No-op when no comment buffer is attached to the lexer.
///
/// Marked `#[inline(never)]` to keep this cold path out of the hot
/// whitespace-skipping loop that calls it.
#[inline(never)]
fn store_comment(
    &mut self,
    is_for_next: bool,
    start: BytePos,
    end: BytePos,
    slice_start: BytePos,
) {
    if let Some(comments) = self.comments_buffer.as_mut() {
        let src = unsafe {
            // Safety: We got slice_start and end from self.input so those are valid.
            self.input.slice(slice_start, end)
        };
        // Drop the trailing `*/` (2 bytes); `s` is the comment text only.
        let s = &src[..src.len() - 2];
        let cmt = Comment {
            kind: CommentKind::Block,
            span: Span::new(start, end, SyntaxContext::empty()),
            text: s.into(),
        };
        // NOTE(review): peek() is called only for its side effect on the input
        // state before the comment is pushed — confirm why this is required.
        let _ = self.input.peek();
        if is_for_next {
            // Comment precedes the next token: record as a pending leading
            // comment for that token.
            comments.push_pending_leading(cmt);
        } else {
            // Comment follows the previous token: record as a trailing comment
            // anchored at the previous token's end position.
            comments.push(BufferedComment {
                kind: BufferedCommentKind::Trailing,
                pos: self.state.prev_hi,
                comment: cmt,
            });
        }
    }
}
}
/// Implemented for `char`.

View File

@@ -1,5 +1,6 @@
use std::{cell::RefCell, mem, mem::take, rc::Rc};
use debug_unreachable::debug_unreachable;
use lexer::TokenContexts;
use swc_common::{BytePos, Span};
@@ -299,18 +300,6 @@ impl<I: Tokens> Buffer<I> {
});
}
/// Advances the token buffer by one token.
///
/// Discards the current token, remembering where it ended in `prev_span`,
/// then refills `cur` — preferring an already-peeked token over pulling a
/// fresh one from the lexer.
#[inline(never)]
fn bump_inner(&mut self) {
    let prev = self.cur.take();
    // Record the span of the token just consumed; if there was no current
    // token, keep the previous value of prev_span unchanged.
    self.prev_span = match prev {
        Some(TokenAndSpan { span, .. }) => span,
        _ => self.prev_span,
    };
    // If we have peeked a token, take it instead of calling lexer.next()
    self.cur = self.next.take().or_else(|| self.iter.next());
}
#[allow(dead_code)]
pub fn cur_debug(&self) -> Option<&Token> {
self.cur.as_ref().map(|it| &it.token)
@@ -327,18 +316,14 @@ impl<I: Tokens> Buffer<I> {
/// Returns current token.
pub fn bump(&mut self) -> Token {
#[cold]
#[inline(never)]
fn invalid_state() -> ! {
unreachable!(
"Current token is `None`. Parser should not call bump() without knowing current \
token"
)
}
let prev = match self.cur.take() {
Some(t) => t,
None => invalid_state(),
None => unsafe {
debug_unreachable!(
"Current token is `None`. Parser should not call bump() without knowing \
current token"
)
},
};
self.prev_span = prev.span;
@@ -388,7 +373,8 @@ impl<I: Tokens> Buffer<I> {
#[inline]
pub fn cur(&mut self) -> Option<&Token> {
if self.cur.is_none() {
self.bump_inner();
// If we have peeked a token, take it instead of calling lexer.next()
self.cur = self.next.take().or_else(|| self.iter.next());
}
match &self.cur {