// swc/crates/swc_css_parser/tests/fixture.rs

#![allow(clippy::needless_update)]
use std::path::PathBuf;
use swc_common::{errors::Handler, input::SourceFileInput, Span, Spanned};
use swc_css_ast::*;
use swc_css_parser::{
error::ErrorKind,
lexer::Lexer,
parse_tokens,
parser::{input::ParserInput, Parser, ParserConfig},
};
use swc_css_visit::{Visit, VisitWith};
use testing::NormalizedOutput;
/// Span-only AST stub.
///
/// NOTE(review): not referenced anywhere in this file — presumably kept for
/// parity with other swc parser test suites; confirm before removing.
pub struct Invalid {
    pub span: Span,
}
#[testing::fixture("tests/fixture/**/input.css")]
fn tokens_input(input: PathBuf) {
testing::run_test2(false, |cm, handler| {
let fm = cm.load_file(&input).unwrap();
let tokens = {
let mut lexer = Lexer::new(SourceFileInput::from(&*fm), Default::default());
let mut tokens = vec![];
while let Ok(t) = lexer.next() {
tokens.push(t);
}
Tokens {
span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
tokens,
}
};
let mut errors = vec![];
let _ss: Stylesheet = parse_tokens(
&tokens,
ParserConfig {
parse_values: true,
..Default::default()
},
&mut errors,
)
.expect("failed to parse tokens");
for err in errors {
err.to_diagnostics(&handler).emit();
}
if handler.has_errors() {
return Err(());
}
Ok(())
})
.unwrap();
}
/// Shared driver for the `pass` and `line_comments` fixture tests.
///
/// Parses `input` with `config` and compares the pretty-printed JSON AST
/// against the sibling `output.json`. When wrong-line comments are not
/// allowed, the file is additionally re-lexed and re-parsed through
/// `parse_tokens`, and both parse paths must produce identical JSON.
fn test_pass(input: PathBuf, config: ParserConfig) {
    testing::run_test2(false, |cm, handler| {
        let ref_json_path = input.parent().unwrap().join("output.json");
        let fm = cm.load_file(&input).unwrap();
        let lexer = Lexer::new(SourceFileInput::from(&*fm), config);
        let mut parser = Parser::new(lexer, config);
        let stylesheet = parser.parse_all();

        match stylesheet {
            Ok(stylesheet) => {
                let actual_json = serde_json::to_string_pretty(&stylesheet)
                    .map(NormalizedOutput::from)
                    .expect("failed to serialize stylesheet");

                // Golden-file comparison against output.json.
                actual_json.clone().compare_to_file(&ref_json_path).unwrap();

                if !config.allow_wrong_line_comments {
                    // Token-level round trip: lex everything, parse the token
                    // stream, and require the same JSON as the direct parse.
                    let mut lexer = Lexer::new(SourceFileInput::from(&*fm), Default::default());
                    let mut tokens = Tokens {
                        span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
                        tokens: vec![],
                    };

                    loop {
                        let res = lexer.next();

                        match res {
                            Ok(t) => {
                                tokens.tokens.push(t);
                            }
                            Err(e) => {
                                // `Eof` is the normal end of the stream; any
                                // other lexer error fails the test.
                                if matches!(e.kind(), ErrorKind::Eof) {
                                    break;
                                }

                                panic!("failed to lex tokens: {:?}", e)
                            }
                        }
                    }

                    let mut errors = vec![];
                    let ss_tok: Stylesheet = parse_tokens(
                        &tokens,
                        ParserConfig {
                            parse_values: true,
                            ..Default::default()
                        },
                        &mut errors,
                    )
                    .expect("failed to parse token");

                    for err in errors {
                        err.to_diagnostics(&handler).emit();
                    }

                    let json_from_tokens = serde_json::to_string_pretty(&ss_tok)
                        .map(NormalizedOutput::from)
                        .expect("failed to serialize stylesheet from tokens");

                    assert_eq!(actual_json, json_from_tokens);
                }

                Ok(())
            }
            Err(err) => {
                // Surface the parse error together with the token the parser
                // was looking at, then fail the test.
                let mut d = err.to_diagnostics(&handler);

                d.note(&format!("current token = {}", parser.dump_cur()));

                d.emit();

                Err(())
            }
        }
    })
    .unwrap();
}
/// Every `tests/fixture/**/input.css` must parse cleanly and match its
/// checked-in `output.json`.
#[testing::fixture("tests/fixture/**/input.css")]
fn pass(input: PathBuf) {
    let config = ParserConfig {
        parse_values: true,
        ..Default::default()
    };

    test_pass(input, config)
}
/// Same as `pass`, but for fixtures that contain `//` line comments, which
/// are tolerated via `allow_wrong_line_comments`.
#[testing::fixture("tests/line-comment/**/input.css")]
fn line_comments(input: PathBuf) {
    let config = ParserConfig {
        parse_values: true,
        allow_wrong_line_comments: true,
        ..Default::default()
    };

    test_pass(input, config)
}
/// Recovery fixtures: inputs under `tests/recovery` are malformed, but the
/// parser must still produce an AST (emitting recoverable errors) rather
/// than failing outright. The emitted diagnostics are compared against
/// `output.swc-stderr`.
#[testing::fixture("tests/recovery/**/input.css")]
fn recovery(input: PathBuf) {
    let stderr_path = input.parent().unwrap().join("output.swc-stderr");
    // Set when `parse_all` itself returns `Err`, i.e. the parser failed to
    // recover; the test then panics below with the captured stderr.
    let mut errored = false;

    let stderr = testing::run_test2(false, |cm, handler| {
        if false {
            // For type inference
            return Ok(());
        }

        let ref_json_path = input.parent().unwrap().join("output.json");

        let config = ParserConfig {
            parse_values: true,
            allow_wrong_line_comments: false,
        };
        let fm = cm.load_file(&input).unwrap();
        let lexer = Lexer::new(SourceFileInput::from(&*fm), config);
        let mut parser = Parser::new(lexer, config);
        let stylesheet = parser.parse_all();

        match stylesheet {
            Ok(stylesheet) => {
                // Compare the recovered AST against the checked-in JSON.
                let actual_json = serde_json::to_string_pretty(&stylesheet)
                    .map(NormalizedOutput::from)
                    .expect("failed to serialize stylesheet");

                actual_json.clone().compare_to_file(&ref_json_path).unwrap();

                {
                    // Re-lex the file and re-parse via the token API; both
                    // parse paths must agree even on malformed input.
                    let mut lexer = Lexer::new(SourceFileInput::from(&*fm), Default::default());
                    let mut tokens = Tokens {
                        span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
                        tokens: vec![],
                    };

                    loop {
                        let res = lexer.next();

                        match res {
                            Ok(t) => {
                                tokens.tokens.push(t);
                            }
                            Err(e) => {
                                // `Eof` is the normal end of the stream; any
                                // other lexer error is a test failure.
                                if matches!(e.kind(), ErrorKind::Eof) {
                                    break;
                                }

                                panic!("failed to lex tokens: {:?}", e)
                            }
                        }
                    }

                    let mut errors = vec![];
                    let ss_tok: Stylesheet = parse_tokens(
                        &tokens,
                        ParserConfig {
                            parse_values: true,
                            ..Default::default()
                        },
                        &mut errors,
                    )
                    .expect("failed to parse token");

                    // Emit the recoverable errors so they land in the stderr
                    // that is compared to `output.swc-stderr` below.
                    for err in errors {
                        err.to_diagnostics(&handler).emit();
                    }

                    let json_from_tokens = serde_json::to_string_pretty(&ss_tok)
                        .map(NormalizedOutput::from)
                        .expect("failed to serialize stylesheet from tokens");

                    assert_eq!(actual_json, json_from_tokens);
                }

                // Deliberate `Err(())`: run_test2 only yields the captured
                // stderr via `unwrap_err`, which the golden comparison needs.
                Err(())
            }
            Err(err) => {
                let mut d = err.to_diagnostics(&handler);

                d.note(&format!("current token = {}", parser.dump_cur()));

                d.emit();

                errored = true;

                Err(())
            }
        }
    })
    .unwrap_err();

    if errored {
        panic!("Parser should recover, but failed with {}", stderr);
    }

    stderr.compare_to_file(&stderr_path).unwrap();
}
/// Visitor that emits one diagnostic per visited AST node, labeled with the
/// node's type name and located at the node's span. The `span` fixture test
/// uses the resulting stderr as a golden file to check span coverage.
struct SpanVisualizer<'a> {
    // Diagnostics sink; all reports go through this handler.
    handler: &'a Handler,
}
/// Generates a `Visit` method for node type `$T`: report the type name as a
/// diagnostic at the node's span, then recurse into the node's children.
macro_rules! mtd {
    ($T:ty,$name:ident) => {
        fn $name(&mut self, n: &$T) {
            self.handler
                .struct_span_err(n.span(), stringify!($T))
                .emit();

            n.visit_children_with(self);
        }
    };
}
// Each `mtd!` invocation below registers one AST node type for span
// visualization. The set and ordering of these entries determine the content
// of the `span.rust-debug` golden files — do not reorder casually.
impl Visit for SpanVisualizer<'_> {
    mtd!(AtRule, visit_at_rule);
    mtd!(AtSelector, visit_at_selector);
    mtd!(AttrSelector, visit_attr_selector);
    mtd!(BinValue, visit_bin_value);
    mtd!(ClassSelector, visit_class_selector);
    mtd!(Delimiter, visit_delimiter);
    mtd!(ComplexSelector, visit_complex_selector);
    mtd!(Combinator, visit_combinator);
    mtd!(CompoundSelector, visit_compound_selector);
    mtd!(Block, visit_block);
    mtd!(SimpleBlock, visit_simple_block);
    mtd!(Function, visit_function);
    mtd!(HexColor, visit_hex_color);
    mtd!(NestingSelector, visit_nesting_selector);
    mtd!(IdSelector, visit_id_selector);
    mtd!(TypeSelector, visit_type_selector);
    mtd!(Number, visit_number);
    mtd!(Ratio, visit_ratio);
    mtd!(Percent, visit_percent);
    mtd!(DeclarationName, visit_declaration_name);
    mtd!(Declaration, visit_declaration);
    mtd!(Nth, visit_nth);
    mtd!(AnPlusB, visit_an_plus_b);
    mtd!(PseudoClassSelector, visit_pseudo_class_selector);
    mtd!(PseudoElementSelector, visit_pseudo_element_selector);
    mtd!(Rule, visit_rule);
    mtd!(Str, visit_str);
    mtd!(QualifiedRule, visit_qualified_rule);
    mtd!(Stylesheet, visit_stylesheet);
    mtd!(SelectorList, visit_selector_list);
    mtd!(SubclassSelector, visit_subclass_selector);
    mtd!(TagSelector, visit_tag_selector);
    mtd!(Ident, visit_ident);
    mtd!(CustomIdent, visit_custom_ident);
    mtd!(DashedIdent, visit_dashed_ident);
    mtd!(Tokens, visit_tokens);
    mtd!(Dimension, visit_dimension);
    mtd!(Url, visit_url);
    mtd!(UrlValue, visit_url_value);
    mtd!(UrlValueRaw, visit_url_value_raw);
    mtd!(UrlModifier, visit_url_modifier);
    mtd!(Value, visit_value);
    mtd!(CharsetRule, visit_charset_rule);
    mtd!(DocumentRule, visit_document_rule);
    mtd!(FontFaceRule, visit_font_face_rule);
    mtd!(ImportRule, visit_import_rule);
    mtd!(ImportHref, visit_import_href);
    mtd!(ImportLayerName, visit_import_layer_name);
    mtd!(ImportSupportsType, visit_import_supports_type);
    mtd!(KeyframesRule, visit_keyframes_rule);
    mtd!(KeyframeBlock, visit_keyframe_block);
    mtd!(KeyframeSelector, visit_keyframe_selector);
    mtd!(LayerName, visit_layer_name);
    mtd!(LayerNameList, visit_layer_name_list);
    mtd!(LayerPrelude, visit_layer_prelude);
    mtd!(LayerRule, visit_layer_rule);
    mtd!(MediaRule, visit_media_rule);
    mtd!(MediaQueryList, visit_media_query_list);
    mtd!(MediaQuery, visit_media_query);
    mtd!(MediaCondition, visit_media_condition);
    mtd!(MediaConditionWithoutOr, visit_media_condition_without_or);
    mtd!(MediaConditionAllType, visit_media_condition_all_type);
    mtd!(
        MediaConditionWithoutOrType,
        visit_media_condition_without_or_type
    );
    mtd!(MediaNot, visit_media_not);
    mtd!(MediaAnd, visit_media_and);
    mtd!(MediaOr, visit_media_or);
    mtd!(MediaInParens, visit_media_in_parens);
    mtd!(MediaFeatureName, visit_media_feature_name);
    mtd!(MediaFeatureValue, visit_media_feature_value);
    mtd!(MediaFeature, visit_media_feature);
    mtd!(MediaFeaturePlain, visit_media_feature_plain);
    mtd!(MediaFeatureBoolean, visit_media_feature_boolean);
    mtd!(MediaFeatureRange, visit_media_feature_range);
    mtd!(
        MediaFeatureRangeInterval,
        visit_media_feature_range_interval
    );
    mtd!(SupportsRule, visit_supports_rule);
    mtd!(SupportsCondition, visit_supports_condition);
    mtd!(SupportsConditionType, visit_supports_condition_type);
    mtd!(SupportsNot, visit_supports_not);
    mtd!(SupportsAnd, visit_supports_and);
    mtd!(SupportsOr, visit_supports_or);
    mtd!(SupportsInParens, visit_supports_in_parens);
    mtd!(SupportsFeature, visit_supports_feature);
    mtd!(NamespaceUri, visit_namespace_uri);
    mtd!(NamespaceRule, visit_namespace_rule);
    mtd!(NestedPageRule, visit_nested_page_rule);
    mtd!(PageRule, visit_page_rule);
    mtd!(PageRuleBlock, visit_page_rule_block);
    mtd!(PageRuleBlockItem, visit_page_rule_block_item);
    mtd!(PageSelector, visit_page_selector);
    mtd!(AtRuleName, visit_at_rule_name);
    mtd!(UnknownAtRule, visit_unknown_at_rule);
    mtd!(ViewportRule, visit_viewport_rule);
    mtd!(ColorProfileName, visit_color_profile_name);
    mtd!(ColorProfileRule, visit_color_profile_rule);

    // Tokens have no `Spanned` impl compatible with `mtd!`, so this one is
    // written out by hand: report the raw token at its own span.
    fn visit_token_and_span(&mut self, n: &TokenAndSpan) {
        self.handler
            .struct_span_err(n.span, &format!("{:?}", n.token))
            .emit();
    }
}
/// Runs `SpanVisualizer` over every parsed fixture and compares the emitted
/// per-node span diagnostics against the `span.rust-debug` golden file.
#[testing::fixture("tests/fixture/**/input.css")]
fn span(input: PathBuf) {
    let dir = input.parent().unwrap().to_path_buf();

    let output = testing::run_test2(false, |cm, handler| {
        // Type annotation
        if false {
            return Ok(());
        }

        let config = ParserConfig {
            parse_values: true,
            ..Default::default()
        };
        let fm = cm.load_file(&input).unwrap();
        let mut parser = Parser::new(Lexer::new(SourceFileInput::from(&*fm), config), config);

        match parser.parse_all() {
            Ok(stylesheet) => {
                stylesheet.visit_with(&mut SpanVisualizer { handler: &handler });

                // `Err` makes run_test2 hand back the captured stderr.
                Err(())
            }
            Err(err) => {
                let mut diagnostic = err.to_diagnostics(&handler);
                diagnostic.note(&format!("current token = {}", parser.dump_cur()));
                diagnostic.emit();

                panic!();
            }
        }
    })
    .unwrap_err();

    output.compare_to_file(&dir.join("span.rust-debug")).unwrap();
}
/// Error fixtures: every `tests/errors/**/input.css` must produce at least
/// one diagnostic, and the emitted stderr must match `output.stderr`.
#[testing::fixture("tests/errors/**/input.css")]
fn fail(input: PathBuf) {
    let stderr_path = input.parent().unwrap().join("output.stderr");

    let stderr = testing::run_test2(false, |cm, handler| -> Result<(), _> {
        let config = ParserConfig {
            parse_values: true,
            ..Default::default()
        };
        let fm = cm.load_file(&input).unwrap();
        let mut parser = Parser::new(Lexer::new(SourceFileInput::from(&*fm), config), config);

        // A hard parse failure is itself one of the expected diagnostics.
        if let Err(err) = parser.parse_all() {
            err.to_diagnostics(&handler).emit();
        }

        // Recoverable errors collected during parsing are expected too.
        for err in parser.take_errors() {
            err.to_diagnostics(&handler).emit();
        }

        if !handler.has_errors() {
            panic!("should error")
        }

        Err(())
    })
    .unwrap_err();

    stderr.compare_to_file(&stderr_path).unwrap();
}