LibWeb: Rename CSS::Token::TokenType -> Type

Sam Atkins 2021-07-09 20:54:06 +01:00 committed by Andreas Kling
parent 8671d79ba4
commit 9c14504bbb
Notes: sideshowbarker 2024-07-18 09:14:12 +09:00
6 changed files with 124 additions and 124 deletions
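The rename is mechanical: the nested enum CSS::Token::TokenType becomes CSS::Token::Type, its members and behaviour are unchanged, and every call site in the parser and tokenizer is updated to match. A minimal self-contained sketch of the resulting shape (stand-in code, not the real LibWeb headers; only the names appearing in the diff below are taken from the source):

// Simplified stand-in for CSS::Token after the rename; everything except the
// names shown in the diff is illustrative.
#include <cassert>

namespace CSS {

class Token {
public:
    // Previously: enum class TokenType { ... };
    enum class Type {
        Invalid,
        Ident,
        Delim,
        Hash,
        // ... remaining members are unchanged by the rename
    };

    explicit Token(Type type)
        : m_type(type)
    {
    }

    // Call sites change from token.is(Token::TokenType::Ident)
    // to token.is(Token::Type::Ident); behaviour is identical.
    bool is(Type type) const { return m_type == type; }

private:
    Type m_type { Type::Invalid };
};

}

int main()
{
    CSS::Token token { CSS::Token::Type::Ident };
    assert(token.is(CSS::Token::Type::Ident));
    assert(!token.is(CSS::Token::Type::Delim));
    return 0;
}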


@ -158,7 +158,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
String value;
// FIXME: Handle namespace prefixes.
if (current_value.is(Token::TokenType::Delim) && current_value.token().delim() == "*") {
if (current_value.is(Token::Type::Delim) && current_value.token().delim() == "*") {
// FIXME: Handle selectors like `*.foo`.
type = CSS::Selector::SimpleSelector::Type::Universal;
@ -167,28 +167,28 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
return result;
}
if (current_value.is(Token::TokenType::Hash)) {
if (current_value.is(Token::Type::Hash)) {
if (current_value.token().m_hash_type != Token::HashType::Id) {
dbgln("Selector contains hash token that is not an id: {}", current_value.to_string());
return {};
}
type = CSS::Selector::SimpleSelector::Type::Id;
value = current_value.token().m_value.to_string();
} else if (current_value.is(Token::TokenType::Delim) && current_value.token().delim() == ".") {
} else if (current_value.is(Token::Type::Delim) && current_value.token().delim() == ".") {
if (index >= parts.size())
return {};
current_value = parts.at(index);
index++;
if (!current_value.is(Token::TokenType::Ident)) {
if (!current_value.is(Token::Type::Ident)) {
dbgln("Expected an ident after '.', got: {}", current_value.to_string());
return {};
}
type = CSS::Selector::SimpleSelector::Type::Class;
value = current_value.to_string();
} else if (current_value.is(Token::TokenType::Delim) && current_value.token().delim() == "*") {
} else if (current_value.is(Token::Type::Delim) && current_value.token().delim() == "*") {
type = CSS::Selector::SimpleSelector::Type::Universal;
} else {
type = CSS::Selector::SimpleSelector::Type::TagName;
@ -212,7 +212,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
// FIXME: Handle namespace prefix for attribute name.
auto& attribute_part = attribute_parts.first();
if (!attribute_part.is(Token::TokenType::Ident)) {
if (!attribute_part.is(Token::Type::Ident)) {
dbgln("Expected ident for attribute name, got: '{}'", attribute_part.to_string());
return {};
}
@ -221,14 +221,14 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
simple_selector.attribute_name = attribute_part.token().ident();
size_t attribute_index = 1;
while (attribute_parts.at(attribute_index).is(Token::TokenType::Whitespace)) {
while (attribute_parts.at(attribute_index).is(Token::Type::Whitespace)) {
attribute_index++;
if (attribute_index >= attribute_parts.size())
return simple_selector;
}
auto& delim_part = attribute_parts.at(attribute_index);
if (!delim_part.is(Token::TokenType::Delim)) {
if (!delim_part.is(Token::Type::Delim)) {
dbgln("Expected a delim for attribute comparison, got: '{}'", delim_part.to_string());
return {};
}
@ -239,7 +239,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
} else {
attribute_index++;
auto& delim_second_part = attribute_parts.at(attribute_index);
if (!(delim_part.is(Token::TokenType::Delim) && delim_part.token().delim() == "=")) {
if (!(delim_part.is(Token::Type::Delim) && delim_part.token().delim() == "=")) {
dbgln("Expected a double delim for attribute comparison, got: '{}{}'", delim_part.to_string(), delim_second_part.to_string());
return {};
}
@ -262,7 +262,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
}
}
while (attribute_parts.at(attribute_index).is(Token::TokenType::Whitespace)) {
while (attribute_parts.at(attribute_index).is(Token::Type::Whitespace)) {
attribute_index++;
if (attribute_index >= attribute_parts.size()) {
dbgln("Attribute selector ended without a value to match.");
@ -271,7 +271,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
}
auto& value_part = attribute_parts.at(attribute_index);
if (!value_part.is(Token::TokenType::Ident) && !value_part.is(Token::TokenType::String)) {
if (!value_part.is(Token::Type::Ident) && !value_part.is(Token::Type::String)) {
dbgln("Expected a string or ident for the value to match attribute against, got: '{}'", value_part.to_string());
return {};
}
@ -282,7 +282,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
}
// FIXME: Pseudo-class selectors want to be their own Selector::SimpleSelector::Type according to the spec.
if (current_value.is(Token::TokenType::Colon)) {
if (current_value.is(Token::Type::Colon)) {
bool is_pseudo = false;
current_value = parts.at(index);
@ -290,7 +290,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
if (index >= parts.size())
return {};
if (current_value.is(Token::TokenType::Colon)) {
if (current_value.is(Token::Type::Colon)) {
is_pseudo = true;
current_value = parts.at(index);
index++;
@ -306,7 +306,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
current_value = parts.at(index);
index++;
if (current_value.is(Token::TokenType::Ident)) {
if (current_value.is(Token::Type::Ident)) {
auto pseudo_name = current_value.token().ident();
if (pseudo_name.equals_ignoring_case("link")) {
simple_selector.pseudo_class = CSS::Selector::SimpleSelector::PseudoClass::Link;
@ -377,7 +377,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
return {};
auto current_value = parts.at(index);
if (current_value.is(Token::TokenType::Delim)) {
if (current_value.is(Token::Type::Delim)) {
auto delim = current_value.token().delim();
if (delim == ">") {
relation = CSS::Selector::ComplexSelector::Relation::ImmediateChild;
@ -393,7 +393,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
return {};
auto next = parts.at(index + 1);
if (next.is(Token::TokenType::Delim) && next.token().delim() == "|") {
if (next.is(Token::Type::Delim) && next.token().delim() == "|") {
relation = CSS::Selector::ComplexSelector::Relation::Column;
index += 2;
}
@ -425,7 +425,7 @@ Optional<Selector> Parser::parse_single_selector(Vector<StyleComponentValueRule>
break;
auto current_value = parts.at(index);
if (current_value.is(Token::TokenType::Comma))
if (current_value.is(Token::Type::Comma))
break;
index++;
@ -895,10 +895,10 @@ Vector<Vector<StyleComponentValueRule>> Parser::parse_as_comma_separated_list_of
}
for (auto& list : lists) {
if (!list.is_empty() && list.first().is(Token::TokenType::Whitespace))
if (!list.is_empty() && list.first().is(Token::Type::Whitespace))
list.take_first();
if (!list.is_empty() && list.last().is(Token::TokenType::Whitespace))
if (!list.is_empty() && list.last().is(Token::Type::Whitespace))
list.take_last();
}


@ -45,7 +45,7 @@ public:
return *m_function;
}
bool is(Token::TokenType type) const
bool is(Token::Type type) const
{
return m_type == ComponentType::Token && m_token.is(type);
}


@ -14,92 +14,92 @@ String Token::to_string() const
StringBuilder builder;
switch (m_type) {
case TokenType::Invalid:
case Type::Invalid:
VERIFY_NOT_REACHED();
case TokenType::EndOfFile:
case Type::EndOfFile:
builder.append("__EOF__");
break;
case TokenType::Ident:
case Type::Ident:
//builder.append("Identifier");
builder.append(m_value.to_string());
return builder.to_string();
case TokenType::Function:
case Type::Function:
builder.append("Function");
break;
case TokenType::AtKeyword:
case Type::AtKeyword:
builder.append("@");
break;
case TokenType::Hash:
case Type::Hash:
builder.append("#");
builder.append(m_value.to_string());
return builder.to_string();
case TokenType::String:
case Type::String:
//builder.append("String");
builder.append(m_value.to_string());
return builder.to_string();
case TokenType::BadString:
case Type::BadString:
builder.append("Invalid String");
break;
case TokenType::Url:
case Type::Url:
builder.append("Url");
break;
case TokenType::BadUrl:
case Type::BadUrl:
builder.append("Invalid Url");
break;
case TokenType::Delim:
case Type::Delim:
//builder.append("Delimiter");
builder.append(m_value.to_string());
return builder.to_string();
case TokenType::Number:
case Type::Number:
//builder.append("Number");
builder.append(m_value.to_string());
builder.append(m_unit.to_string());
return builder.to_string();
case TokenType::Percentage:
case Type::Percentage:
//builder.append("Percentage");
builder.append(m_value.to_string());
builder.append(m_unit.to_string());
return builder.to_string();
case TokenType::Dimension:
case Type::Dimension:
//builder.append("Dimension");
builder.append(m_value.to_string());
builder.append(m_unit.to_string());
return builder.to_string();
case TokenType::Whitespace:
case Type::Whitespace:
builder.append("Whitespace");
break;
case TokenType::CDO:
case Type::CDO:
builder.append("CDO");
break;
case TokenType::CDC:
case Type::CDC:
builder.append("CDC");
break;
case TokenType::Colon:
case Type::Colon:
builder.append(":");
break;
case TokenType::Semicolon:
case Type::Semicolon:
builder.append(";");
break;
case TokenType::Comma:
case Type::Comma:
builder.append(",");
break;
case TokenType::OpenSquare:
case Type::OpenSquare:
builder.append("[");
break;
case TokenType::CloseSquare:
case Type::CloseSquare:
builder.append("]");
break;
case TokenType::OpenParen:
case Type::OpenParen:
builder.append("(");
break;
case TokenType::CloseParen:
case Type::CloseParen:
builder.append(")");
break;
case TokenType::OpenCurly:
case Type::OpenCurly:
builder.append("{");
break;
case TokenType::CloseCurly:
case Type::CloseCurly:
builder.append("}");
break;
}
@ -113,7 +113,7 @@ String Token::to_string() const
builder.append(" { value: '");
builder.append(m_value.to_string());
if (m_type == Token::TokenType::Hash) {
if (m_type == Token::Type::Hash) {
builder.append("', hash_type: '");
if (m_hash_type == Token::HashType::Unrestricted) {
builder.append("Unrestricted");
@ -122,7 +122,7 @@ String Token::to_string() const
}
}
if (m_type == Token::TokenType::Number) {
if (m_type == Token::Type::Number) {
builder.append("', number_type: '");
if (m_number_type == Token::NumberType::Integer) {
builder.append("Integer");
@ -131,7 +131,7 @@ String Token::to_string() const
}
}
if (m_type == Token::TokenType::Dimension) {
if (m_type == Token::Type::Dimension) {
builder.append("', number_type: '");
if (m_number_type == Token::NumberType::Integer) {
builder.append("Integer");
@ -147,21 +147,21 @@ String Token::to_string() const
return builder.to_string();
}
Token::TokenType Token::mirror_variant() const
Token::Type Token::mirror_variant() const
{
if (is_open_curly()) {
return TokenType::CloseCurly;
return Type::CloseCurly;
}
if (is_open_square()) {
return TokenType::CloseSquare;
return Type::CloseSquare;
}
if (is_open_paren()) {
return TokenType::CloseParen;
return Type::CloseParen;
}
return TokenType::Invalid;
return Type::Invalid;
}
String Token::bracket_string() const


@ -17,7 +17,7 @@ class Token {
friend class Tokenizer;
public:
enum class TokenType {
enum class Type {
Invalid,
EndOfFile,
Ident,
@ -56,33 +56,33 @@ public:
Number,
};
bool is_eof() const { return m_type == TokenType::EndOfFile; }
bool is_ident() const { return m_type == TokenType::Ident; }
bool is_function() const { return m_type == TokenType::Function; }
bool is_at() const { return m_type == TokenType::AtKeyword; }
bool is_hash() const { return m_type == TokenType::Hash; }
bool is_string() const { return m_type == TokenType::String; }
bool is_bad_string() const { return m_type == TokenType::BadString; }
bool is_url() const { return m_type == TokenType::Url; }
bool is_bad_url() const { return m_type == TokenType::BadUrl; }
bool is_delim() const { return m_type == TokenType::Delim; }
bool is_number() const { return m_type == TokenType::Number; }
bool is_percentage() const { return m_type == TokenType::Percentage; }
bool is_dimension() const { return m_type == TokenType::Dimension; }
bool is_whitespace() const { return m_type == TokenType::Whitespace; }
bool is_cdo() const { return m_type == TokenType::CDO; }
bool is_cdc() const { return m_type == TokenType::CDC; }
bool is_colon() const { return m_type == TokenType::Colon; }
bool is_semicolon() const { return m_type == TokenType::Semicolon; }
bool is_comma() const { return m_type == TokenType::Comma; }
bool is_open_square() const { return m_type == TokenType::OpenSquare; }
bool is_close_square() const { return m_type == TokenType::CloseSquare; }
bool is_open_paren() const { return m_type == TokenType::OpenParen; }
bool is_close_paren() const { return m_type == TokenType::CloseParen; }
bool is_open_curly() const { return m_type == TokenType::OpenCurly; }
bool is_close_curly() const { return m_type == TokenType::CloseCurly; }
bool is_eof() const { return m_type == Type::EndOfFile; }
bool is_ident() const { return m_type == Type::Ident; }
bool is_function() const { return m_type == Type::Function; }
bool is_at() const { return m_type == Type::AtKeyword; }
bool is_hash() const { return m_type == Type::Hash; }
bool is_string() const { return m_type == Type::String; }
bool is_bad_string() const { return m_type == Type::BadString; }
bool is_url() const { return m_type == Type::Url; }
bool is_bad_url() const { return m_type == Type::BadUrl; }
bool is_delim() const { return m_type == Type::Delim; }
bool is_number() const { return m_type == Type::Number; }
bool is_percentage() const { return m_type == Type::Percentage; }
bool is_dimension() const { return m_type == Type::Dimension; }
bool is_whitespace() const { return m_type == Type::Whitespace; }
bool is_cdo() const { return m_type == Type::CDO; }
bool is_cdc() const { return m_type == Type::CDC; }
bool is_colon() const { return m_type == Type::Colon; }
bool is_semicolon() const { return m_type == Type::Semicolon; }
bool is_comma() const { return m_type == Type::Comma; }
bool is_open_square() const { return m_type == Type::OpenSquare; }
bool is_close_square() const { return m_type == Type::CloseSquare; }
bool is_open_paren() const { return m_type == Type::OpenParen; }
bool is_close_paren() const { return m_type == Type::CloseParen; }
bool is_open_curly() const { return m_type == Type::OpenCurly; }
bool is_close_curly() const { return m_type == Type::CloseCurly; }
bool is(TokenType type) const { return m_type == type; }
bool is(Type type) const { return m_type == type; }
StringView ident() const
{
@ -102,13 +102,13 @@ public:
return m_value.string_view();
}
TokenType mirror_variant() const;
Type mirror_variant() const;
String bracket_string() const;
String bracket_mirror_string() const;
String to_string() const;
private:
TokenType m_type { TokenType::Invalid };
Type m_type { Type::Invalid };
StringBuilder m_value;
StringBuilder m_unit;


@ -262,14 +262,14 @@ U32Triplet Tokenizer::peek_triplet() const
return values;
}
Token Tokenizer::create_new_token(Token::TokenType type)
Token Tokenizer::create_new_token(Token::Type type)
{
Token token = {};
token.m_type = type;
return token;
}
Token Tokenizer::create_value_token(Token::TokenType type, String value)
Token Tokenizer::create_value_token(Token::Type type, String value)
{
Token token;
token.m_type = type;
@ -277,7 +277,7 @@ Token Tokenizer::create_value_token(Token::TokenType type, String value)
return token;
}
Token Tokenizer::create_value_token(Token::TokenType type, u32 value)
Token Tokenizer::create_value_token(Token::Type type, u32 value)
{
Token token = {};
token.m_type = type;
@ -342,7 +342,7 @@ Token Tokenizer::consume_an_ident_like_token()
auto next_two = peek_twin();
// if one of these ", ', ' "', " '"
if (is_quotation_mark(next_two.first) || is_apostrophe(next_two.first) || (is_whitespace(next_two.first) && (is_quotation_mark(next_two.second) || is_apostrophe(next_two.second)))) {
return create_value_token(Token::TokenType::Function, string);
return create_value_token(Token::Type::Function, string);
}
return consume_a_url_token();
@ -351,10 +351,10 @@ Token Tokenizer::consume_an_ident_like_token()
if (is_left_paren(peek_code_point())) {
(void)next_code_point();
return create_value_token(Token::TokenType::Function, string);
return create_value_token(Token::Type::Function, string);
}
return create_value_token(Token::TokenType::Ident, string);
return create_value_token(Token::Type::Ident, string);
}
CSSNumber Tokenizer::consume_a_number()
@ -447,7 +447,7 @@ String Tokenizer::consume_a_name()
}
Token Tokenizer::consume_a_url_token()
{
auto token = create_new_token(Token::TokenType::Url);
auto token = create_new_token(Token::Type::Url);
for (;;) {
if (!is_whitespace(peek_code_point())) {
break;
@ -488,14 +488,14 @@ Token Tokenizer::consume_a_url_token()
}
consume_the_remnants_of_a_bad_url();
return create_new_token(Token::TokenType::BadUrl);
return create_new_token(Token::Type::BadUrl);
}
if (is_quotation_mark(input) || is_apostrophe(input) || is_left_paren(input) || is_non_printable(input)) {
log_parse_error();
(void)next_code_point();
consume_the_remnants_of_a_bad_url();
return create_new_token(Token::TokenType::BadUrl);
return create_new_token(Token::Type::BadUrl);
}
if (is_reverse_solidus(input)) {
@ -505,7 +505,7 @@ Token Tokenizer::consume_a_url_token()
log_parse_error();
(void)next_code_point();
consume_the_remnants_of_a_bad_url();
return create_new_token(Token::TokenType::BadUrl);
return create_new_token(Token::Type::BadUrl);
}
}
@ -546,7 +546,7 @@ Token Tokenizer::consume_a_numeric_token()
{
auto number = consume_a_number();
if (would_start_an_identifier()) {
auto token = create_new_token(Token::TokenType::Dimension);
auto token = create_new_token(Token::Type::Dimension);
token.m_value.append(number.value);
token.m_number_type = number.type;
@ -559,12 +559,12 @@ Token Tokenizer::consume_a_numeric_token()
if (is_percent(peek_code_point())) {
(void)next_code_point();
auto token = create_new_token(Token::TokenType::Percentage);
auto token = create_new_token(Token::Type::Percentage);
token.m_value.append(number.value);
return token;
}
auto token = create_new_token(Token::TokenType::Number);
auto token = create_new_token(Token::Type::Number);
token.m_value.append(number.value);
token.m_number_type = number.type;
return token;
@ -642,7 +642,7 @@ bool Tokenizer::would_start_an_identifier(U32Triplet values)
Token Tokenizer::consume_string_token(u32 ending_code_point)
{
auto token = create_new_token(Token::TokenType::String);
auto token = create_new_token(Token::Type::String);
for (;;) {
auto input = next_code_point();
@ -657,7 +657,7 @@ Token Tokenizer::consume_string_token(u32 ending_code_point)
if (is_newline(input)) {
reconsume_current_input_code_point();
return create_new_token(Token::TokenType::BadString);
return create_new_token(Token::Type::BadString);
}
if (is_reverse_solidus(input)) {
@ -712,7 +712,7 @@ Token Tokenizer::consume_a_token()
auto input = next_code_point();
if (is_eof(input)) {
return create_new_token(Token::TokenType::EndOfFile);
return create_new_token(Token::Type::EndOfFile);
}
if (is_whitespace(input)) {
@ -724,7 +724,7 @@ Token Tokenizer::consume_a_token()
next = peek_code_point();
}
return create_new_token(Token::TokenType::Whitespace);
return create_new_token(Token::Type::Whitespace);
}
if (is_quotation_mark(input)) {
@ -739,7 +739,7 @@ Token Tokenizer::consume_a_token()
auto maybe_escape = peek_twin();
if (is_name_code_point(next_input) || is_valid_escape_sequence(maybe_escape)) {
auto token = create_new_token(Token::TokenType::Hash);
auto token = create_new_token(Token::Type::Hash);
if (would_start_an_identifier())
token.m_hash_type = Token::HashType::Id;
@ -750,7 +750,7 @@ Token Tokenizer::consume_a_token()
return token;
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_apostrophe(input)) {
@ -760,12 +760,12 @@ Token Tokenizer::consume_a_token()
if (is_left_paren(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is left paren");
return create_new_token(Token::TokenType::OpenParen);
return create_new_token(Token::Type::OpenParen);
}
if (is_right_paren(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is right paren");
return create_new_token(Token::TokenType::CloseParen);
return create_new_token(Token::Type::CloseParen);
}
if (is_plus_sign(input)) {
@ -775,12 +775,12 @@ Token Tokenizer::consume_a_token()
return consume_a_numeric_token();
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_comma(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is comma");
return create_new_token(Token::TokenType::Comma);
return create_new_token(Token::Type::Comma);
}
if (is_hyphen_minus(input)) {
@ -795,7 +795,7 @@ Token Tokenizer::consume_a_token()
(void)next_code_point();
(void)next_code_point();
return create_new_token(Token::TokenType::CDC);
return create_new_token(Token::Type::CDC);
}
if (would_start_an_identifier()) {
@ -803,7 +803,7 @@ Token Tokenizer::consume_a_token()
return consume_an_ident_like_token();
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_full_stop(input)) {
@ -813,17 +813,17 @@ Token Tokenizer::consume_a_token()
return consume_a_numeric_token();
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_colon(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is colon");
return create_new_token(Token::TokenType::Colon);
return create_new_token(Token::Type::Colon);
}
if (is_semicolon(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is semicolon");
return create_new_token(Token::TokenType::Semicolon);
return create_new_token(Token::Type::Semicolon);
}
if (is_less_than_sign(input)) {
@ -835,10 +835,10 @@ Token Tokenizer::consume_a_token()
(void)next_code_point();
(void)next_code_point();
return create_new_token(Token::TokenType::CDO);
return create_new_token(Token::Type::CDO);
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_at(input)) {
@ -846,15 +846,15 @@ Token Tokenizer::consume_a_token()
if (would_start_an_identifier()) {
auto name = consume_a_name();
return create_value_token(Token::TokenType::AtKeyword, input);
return create_value_token(Token::Type::AtKeyword, input);
}
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_open_square_bracket(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is open square");
return create_new_token(Token::TokenType::OpenSquare);
return create_new_token(Token::Type::OpenSquare);
}
if (is_reverse_solidus(input)) {
@ -865,22 +865,22 @@ Token Tokenizer::consume_a_token()
}
log_parse_error();
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
if (is_closed_square_bracket(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is closed square");
return create_new_token(Token::TokenType::CloseSquare);
return create_new_token(Token::Type::CloseSquare);
}
if (is_open_curly_bracket(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is open curly");
return create_new_token(Token::TokenType::OpenCurly);
return create_new_token(Token::Type::OpenCurly);
}
if (is_closed_curly_bracket(input)) {
dbgln_if(CSS_TOKENIZER_TRACE, "is closed curly");
return create_new_token(Token::TokenType::CloseCurly);
return create_new_token(Token::Type::CloseCurly);
}
if (is_ascii_digit(input)) {
@ -896,7 +896,7 @@ Token Tokenizer::consume_a_token()
}
dbgln_if(CSS_TOKENIZER_TRACE, "is delimiter");
return create_value_token(Token::TokenType::Delim, input);
return create_value_token(Token::Type::Delim, input);
}
}


@ -76,9 +76,9 @@ private:
[[nodiscard]] U32Twin peek_twin() const;
[[nodiscard]] U32Triplet peek_triplet() const;
[[nodiscard]] static Token create_new_token(Token::TokenType);
[[nodiscard]] static Token create_value_token(Token::TokenType, String value);
[[nodiscard]] static Token create_value_token(Token::TokenType, u32 value);
[[nodiscard]] static Token create_new_token(Token::Type);
[[nodiscard]] static Token create_value_token(Token::Type, String value);
[[nodiscard]] static Token create_value_token(Token::Type, u32 value);
[[nodiscard]] Token consume_a_token();
[[nodiscard]] Token consume_string_token(u32 ending_code_point);
[[nodiscard]] Token consume_a_numeric_token();