perf(html/parser): Improve lexer (#4796)

This commit is contained in:
Alexander Akait 2022-05-25 20:33:15 +03:00 committed by GitHub
parent 7a122eb089
commit 8894e77bc1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 9 additions and 18 deletions

View File

@@ -124,7 +124,7 @@ where
state: State,
return_state: State,
errors: Vec<Error>,
-last_start_tag_token: Option<Token>,
+last_start_tag_name: Option<JsWord>,
pending_tokens: Vec<TokenAndSpan>,
cur_token: Option<Token>,
attribute_start_position: Option<BytePos>,
@@ -152,7 +152,7 @@ where
state: State::Data,
return_state: State::Data,
errors: vec![],
-last_start_tag_token: None,
+last_start_tag_name: None,
pending_tokens: vec![],
cur_token: None,
attribute_start_position: None,
@@ -194,8 +194,8 @@ where
take(&mut self.errors)
}
-fn set_last_start_tag_token(&mut self, token: Token) {
-    self.last_start_tag_token = Some(token);
+fn set_last_start_tag_name(&mut self, tag_name: &str) {
+    self.last_start_tag_name = Some(tag_name.into());
}
fn set_adjusted_current_node_to_html_namespace(&mut self, value: bool) {
@@ -285,8 +285,8 @@ where
self.start_pos = end;
-if let Token::StartTag { .. } = token {
-    self.last_start_tag_token = Some(token.clone());
+if let Token::StartTag { tag_name, .. } = &token {
+    self.last_start_tag_name = Some(tag_name.clone());
}
let token_and_span = TokenAndSpan { span, token };
@@ -365,11 +365,7 @@ where
// any. If no start tag has been emitted from this tokenizer, then no end tag
// token is appropriate.
fn current_end_tag_token_is_an_appropriate_end_tag_token(&mut self) -> bool {
-if let Some(Token::StartTag {
-    tag_name: last_start_tag_name,
-    ..
-}) = &self.last_start_tag_token
-{
+if let Some(last_start_tag_name) = &self.last_start_tag_name {
if let Some(Token::EndTag {
tag_name: end_tag_name,
..

View File

@@ -11,7 +11,7 @@ pub trait ParserInput: Clone + Iterator<Item = TokenAndSpan> {
fn take_errors(&mut self) -> Vec<Error>;
-fn set_last_start_tag_token(&mut self, token: Token);
+fn set_last_start_tag_name(&mut self, tag_name: &str);
fn set_input_state(&mut self, state: State);

View File

@@ -583,12 +583,7 @@ fn html5lib_test_tokenizer(input: PathBuf) {
let last_start_tag: String = serde_json::from_value(last_start_tag.clone())
.expect("failed to get lastStartTag in test");
-lexer.set_last_start_tag_token(Token::StartTag {
-    tag_name: last_start_tag.into(),
-    raw_tag_name: None,
-    self_closing: false,
-    attributes: vec![],
-});
+lexer.set_last_start_tag_name(&last_start_tag);
}
let mut actual_tokens = vec![];