use std::path::PathBuf;

use swc_common::{errors::Handler, input::SourceFileInput, Span, Spanned, DUMMY_SP};
use swc_css_ast::*;
use swc_css_parser::{
    error::ErrorKind,
    lexer::Lexer,
    parse_tokens,
    parser::{input::ParserInput, Parser, ParserConfig},
};
use swc_css_visit::{Node, Visit, VisitWith};
use testing::NormalizedOutput;
|
|
|
|
|
2021-09-09 08:36:56 +03:00
|
|
|
struct AssertValid;
|
|
|
|
|
|
|
|
impl Visit for AssertValid {
|
|
|
|
fn visit_pseudo_selector(&mut self, s: &PseudoSelector, _: &dyn Node) {
|
|
|
|
s.visit_children_with(self);
|
|
|
|
|
|
|
|
if s.args.tokens.is_empty() {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
match &s.args.tokens[0].token {
|
|
|
|
Token::Colon | Token::Num(..) => return,
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
let mut errors = vec![];
|
|
|
|
|
2021-09-09 08:36:56 +03:00
|
|
|
let _selectors: Vec<ComplexSelector> =
|
2021-09-28 12:58:56 +03:00
|
|
|
parse_tokens(&s.args, ParserConfig { parse_values: true }, &mut errors)
|
2021-09-09 08:36:56 +03:00
|
|
|
.unwrap_or_else(|err| panic!("failed to parse tokens: {:?}\n{:?}", err, s.args));
|
2021-09-28 12:58:56 +03:00
|
|
|
|
|
|
|
for err in errors {
|
|
|
|
panic!("{:?}", err);
|
|
|
|
}
|
2021-09-09 08:36:56 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-08 04:19:14 +03:00
|
|
|
#[testing::fixture("tests/fixture/**/input.css")]
|
|
|
|
fn tokens_input(input: PathBuf) {
|
|
|
|
eprintln!("Input: {}", input.display());
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
testing::run_test2(false, |cm, handler| {
|
2021-09-08 04:19:14 +03:00
|
|
|
let fm = cm.load_file(&input).unwrap();
|
|
|
|
|
|
|
|
let tokens = {
|
|
|
|
let mut lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
|
|
|
|
let mut tokens = vec![];
|
|
|
|
|
|
|
|
while let Ok(t) = lexer.next() {
|
|
|
|
tokens.push(t);
|
|
|
|
}
|
|
|
|
Tokens {
|
|
|
|
span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
|
|
|
|
tokens,
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
let mut errors = vec![];
|
|
|
|
let ss: Stylesheet =
|
|
|
|
parse_tokens(&tokens, ParserConfig { parse_values: true }, &mut errors)
|
|
|
|
.expect("failed to parse tokens");
|
|
|
|
|
|
|
|
for err in errors {
|
|
|
|
err.to_diagnostics(&handler).emit();
|
|
|
|
}
|
2021-09-08 04:19:14 +03:00
|
|
|
|
2021-09-09 08:36:56 +03:00
|
|
|
ss.visit_with(&Invalid { span: DUMMY_SP }, &mut AssertValid);
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
if handler.has_errors() {
|
|
|
|
return Err(());
|
|
|
|
}
|
|
|
|
|
2021-09-08 04:19:14 +03:00
|
|
|
Ok(())
|
|
|
|
})
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
|
2021-08-19 08:16:32 +03:00
|
|
|
#[testing::fixture("tests/fixture/**/input.css")]
|
|
|
|
fn pass(input: PathBuf) {
|
|
|
|
eprintln!("Input: {}", input.display());
|
|
|
|
|
|
|
|
testing::run_test2(false, |cm, handler| {
|
|
|
|
let ref_json_path = input.parent().unwrap().join("output.json");
|
|
|
|
|
|
|
|
let fm = cm.load_file(&input).unwrap();
|
|
|
|
let lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut parser = Parser::new(lexer, ParserConfig { parse_values: true });
|
|
|
|
|
|
|
|
let stylesheet = parser.parse_all();
|
|
|
|
|
|
|
|
match stylesheet {
|
|
|
|
Ok(stylesheet) => {
|
|
|
|
let actual_json = serde_json::to_string_pretty(&stylesheet)
|
|
|
|
.map(NormalizedOutput::from)
|
|
|
|
.expect("failed to serialize stylesheet");
|
|
|
|
|
2021-08-25 05:46:17 +03:00
|
|
|
actual_json.clone().compare_to_file(&ref_json_path).unwrap();
|
|
|
|
|
|
|
|
{
|
|
|
|
let mut lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut tokens = Tokens {
|
|
|
|
span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
|
|
|
|
tokens: vec![],
|
|
|
|
};
|
|
|
|
|
|
|
|
loop {
|
|
|
|
let res = lexer.next();
|
|
|
|
match res {
|
|
|
|
Ok(t) => {
|
|
|
|
tokens.tokens.push(t);
|
|
|
|
}
|
|
|
|
|
|
|
|
Err(e) => {
|
|
|
|
if matches!(e.kind(), ErrorKind::Eof) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
panic!("failed to lex tokens: {:?}", e)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
let mut errors = vec![];
|
2021-08-25 05:46:17 +03:00
|
|
|
let ss_tok: Stylesheet =
|
2021-09-28 12:58:56 +03:00
|
|
|
parse_tokens(&tokens, ParserConfig { parse_values: true }, &mut errors)
|
2021-08-25 05:46:17 +03:00
|
|
|
.expect("failed to parse token");
|
|
|
|
|
2021-09-28 12:58:56 +03:00
|
|
|
for err in errors {
|
|
|
|
err.to_diagnostics(&handler).emit();
|
|
|
|
}
|
|
|
|
|
2021-08-25 05:46:17 +03:00
|
|
|
let json_from_tokens = serde_json::to_string_pretty(&ss_tok)
|
|
|
|
.map(NormalizedOutput::from)
|
|
|
|
.expect("failed to serialize stylesheet from tokens");
|
|
|
|
|
|
|
|
assert_eq!(actual_json, json_from_tokens);
|
|
|
|
}
|
2021-08-19 08:16:32 +03:00
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
let mut d = err.to_diagnostics(&handler);
|
|
|
|
d.note(&format!("current token = {}", parser.dump_cur()));
|
|
|
|
|
|
|
|
d.emit();
|
|
|
|
|
|
|
|
Err(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
|
2021-09-29 08:38:49 +03:00
|
|
|
#[testing::fixture("tests/recovery/**/input.css")]
|
|
|
|
fn recovery(input: PathBuf) {
|
|
|
|
eprintln!("Input: {}", input.display());
|
|
|
|
let stderr_path = input.parent().unwrap().join("output.swc-stderr");
|
|
|
|
|
|
|
|
let mut errored = false;
|
|
|
|
|
|
|
|
let stderr = testing::run_test2(false, |cm, handler| {
|
|
|
|
if false {
|
|
|
|
// For type inference
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
|
|
|
|
let ref_json_path = input.parent().unwrap().join("output.json");
|
|
|
|
|
|
|
|
let fm = cm.load_file(&input).unwrap();
|
|
|
|
let lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut parser = Parser::new(lexer, ParserConfig { parse_values: true });
|
|
|
|
|
|
|
|
let stylesheet = parser.parse_all();
|
|
|
|
|
|
|
|
match stylesheet {
|
|
|
|
Ok(stylesheet) => {
|
|
|
|
let actual_json = serde_json::to_string_pretty(&stylesheet)
|
|
|
|
.map(NormalizedOutput::from)
|
|
|
|
.expect("failed to serialize stylesheet");
|
|
|
|
|
|
|
|
actual_json.clone().compare_to_file(&ref_json_path).unwrap();
|
|
|
|
|
|
|
|
{
|
|
|
|
let mut lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut tokens = Tokens {
|
|
|
|
span: Span::new(fm.start_pos, fm.end_pos, Default::default()),
|
|
|
|
tokens: vec![],
|
|
|
|
};
|
|
|
|
|
|
|
|
loop {
|
|
|
|
let res = lexer.next();
|
|
|
|
match res {
|
|
|
|
Ok(t) => {
|
|
|
|
tokens.tokens.push(t);
|
|
|
|
}
|
|
|
|
|
|
|
|
Err(e) => {
|
|
|
|
if matches!(e.kind(), ErrorKind::Eof) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
panic!("failed to lex tokens: {:?}", e)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut errors = vec![];
|
|
|
|
let ss_tok: Stylesheet =
|
|
|
|
parse_tokens(&tokens, ParserConfig { parse_values: true }, &mut errors)
|
|
|
|
.expect("failed to parse token");
|
|
|
|
|
|
|
|
for err in errors {
|
|
|
|
err.to_diagnostics(&handler).emit();
|
|
|
|
}
|
|
|
|
|
|
|
|
let json_from_tokens = serde_json::to_string_pretty(&ss_tok)
|
|
|
|
.map(NormalizedOutput::from)
|
|
|
|
.expect("failed to serialize stylesheet from tokens");
|
|
|
|
|
|
|
|
assert_eq!(actual_json, json_from_tokens);
|
|
|
|
}
|
|
|
|
|
|
|
|
Err(())
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
let mut d = err.to_diagnostics(&handler);
|
|
|
|
d.note(&format!("current token = {}", parser.dump_cur()));
|
|
|
|
|
|
|
|
d.emit();
|
|
|
|
|
|
|
|
errored = true;
|
|
|
|
|
|
|
|
Err(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.unwrap_err();
|
|
|
|
|
|
|
|
if errored {
|
|
|
|
panic!("Parser should recover, but failed with {}", stderr);
|
|
|
|
}
|
|
|
|
|
|
|
|
stderr.compare_to_file(&stderr_path).unwrap();
|
|
|
|
}
|
|
|
|
|
2021-08-20 10:48:08 +03:00
|
|
|
struct SpanVisualizer<'a> {
|
|
|
|
handler: &'a Handler,
|
|
|
|
}
|
|
|
|
|
|
|
|
macro_rules! mtd {
|
|
|
|
($T:ty,$name:ident) => {
|
|
|
|
fn $name(&mut self, n: &$T, _: &dyn swc_css_visit::Node) {
|
|
|
|
self.handler
|
|
|
|
.struct_span_err(n.span(), stringify!($T))
|
|
|
|
.emit();
|
|
|
|
|
|
|
|
n.visit_children_with(self);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Visit for SpanVisualizer<'_> {
|
|
|
|
mtd!(ArrayValue, visit_array_value);
|
|
|
|
mtd!(AtRule, visit_at_rule);
|
|
|
|
mtd!(AtSelector, visit_at_selector);
|
|
|
|
mtd!(AtTextValue, visit_at_text_value);
|
|
|
|
mtd!(AttrSelector, visit_attr_selector);
|
|
|
|
mtd!(BinValue, visit_bin_value);
|
|
|
|
mtd!(BraceValue, visit_brace_value);
|
|
|
|
mtd!(ClassSelector, visit_class_selector);
|
|
|
|
mtd!(SpaceValues, visit_space_values);
|
|
|
|
mtd!(ComplexSelector, visit_complex_selector);
|
|
|
|
mtd!(CompoundSelector, visit_compound_selector);
|
|
|
|
mtd!(DeclBlock, visit_decl_block);
|
|
|
|
mtd!(FnValue, visit_fn_value);
|
|
|
|
mtd!(HashValue, visit_hash_value);
|
|
|
|
mtd!(IdSelector, visit_id_selector);
|
|
|
|
mtd!(NamespacedName, visit_namespaced_name);
|
|
|
|
mtd!(Num, visit_num);
|
|
|
|
mtd!(ParenValue, visit_paren_value);
|
|
|
|
mtd!(PercentValue, visit_percent_value);
|
|
|
|
mtd!(Property, visit_property);
|
|
|
|
mtd!(PseudoSelector, visit_pseudo_selector);
|
|
|
|
mtd!(Rule, visit_rule);
|
|
|
|
mtd!(Str, visit_str);
|
|
|
|
mtd!(StyleRule, visit_style_rule);
|
|
|
|
mtd!(Stylesheet, visit_stylesheet);
|
|
|
|
mtd!(SubclassSelector, visit_subclass_selector);
|
|
|
|
mtd!(TagSelector, visit_tag_selector);
|
|
|
|
mtd!(Text, visit_text);
|
|
|
|
mtd!(Tokens, visit_tokens);
|
|
|
|
mtd!(Unit, visit_unit);
|
|
|
|
mtd!(UnitValue, visit_unit_value);
|
|
|
|
mtd!(UniversalSelector, visit_universal_selector);
|
|
|
|
mtd!(UrlValue, visit_url_value);
|
|
|
|
mtd!(Value, visit_value);
|
|
|
|
|
|
|
|
mtd!(AndMediaQuery, visit_and_media_query);
|
|
|
|
mtd!(AndSupportQuery, visit_and_support_query);
|
|
|
|
mtd!(CharsetRule, visit_charset_rule);
|
|
|
|
mtd!(CommaMediaQuery, visit_comma_media_query);
|
|
|
|
mtd!(DocumentRule, visit_document_rule);
|
|
|
|
mtd!(FontFaceRule, visit_font_face_rule);
|
2021-10-08 14:55:19 +03:00
|
|
|
mtd!(ImportSource, visit_import_source);
|
2021-08-20 10:48:08 +03:00
|
|
|
mtd!(ImportRule, visit_import_rule);
|
|
|
|
mtd!(KeyframeBlock, visit_keyframe_block);
|
|
|
|
mtd!(KeyframeBlockRule, visit_keyframe_block_rule);
|
|
|
|
mtd!(KeyframeSelector, visit_keyframe_selector);
|
|
|
|
mtd!(KeyframesRule, visit_keyframes_rule);
|
|
|
|
mtd!(MediaQuery, visit_media_query);
|
|
|
|
mtd!(MediaRule, visit_media_rule);
|
|
|
|
mtd!(NamespaceRule, visit_namespace_rule);
|
|
|
|
mtd!(NestedPageRule, visit_nested_page_rule);
|
|
|
|
mtd!(NotMediaQuery, visit_not_media_query);
|
|
|
|
mtd!(NotSupportQuery, visit_not_support_query);
|
|
|
|
mtd!(OnlyMediaQuery, visit_only_media_query);
|
|
|
|
mtd!(OrMediaQuery, visit_or_media_query);
|
|
|
|
mtd!(OrSupportQuery, visit_or_support_query);
|
|
|
|
mtd!(PageRule, visit_page_rule);
|
|
|
|
mtd!(PageRuleBlock, visit_page_rule_block);
|
|
|
|
mtd!(PageRuleBlockItem, visit_page_rule_block_item);
|
|
|
|
mtd!(PageSelector, visit_page_selector);
|
|
|
|
mtd!(ParenSupportQuery, visit_paren_support_query);
|
|
|
|
mtd!(SupportQuery, visit_support_query);
|
|
|
|
mtd!(SupportsRule, visit_supports_rule);
|
|
|
|
mtd!(UnknownAtRule, visit_unknown_at_rule);
|
|
|
|
mtd!(ViewportRule, visit_viewport_rule);
|
|
|
|
|
|
|
|
fn visit_token_and_span(&mut self, n: &TokenAndSpan, _parent: &dyn swc_css_visit::Node) {
|
|
|
|
self.handler
|
|
|
|
.struct_span_err(n.span, &format!("{:?}", n.token))
|
|
|
|
.emit();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[testing::fixture("tests/fixture/**/input.css")]
|
|
|
|
fn span(input: PathBuf) {
|
|
|
|
eprintln!("Input: {}", input.display());
|
|
|
|
let dir = input.parent().unwrap().to_path_buf();
|
|
|
|
|
|
|
|
let output = testing::run_test2(false, |cm, handler| {
|
|
|
|
if false {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
|
|
|
|
let fm = cm.load_file(&input).unwrap();
|
|
|
|
let lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut parser = Parser::new(lexer, ParserConfig { parse_values: true });
|
|
|
|
|
|
|
|
let stylesheet = parser.parse_all();
|
|
|
|
|
|
|
|
match stylesheet {
|
|
|
|
Ok(stylesheet) => {
|
|
|
|
stylesheet.visit_with(
|
|
|
|
&Invalid { span: DUMMY_SP },
|
|
|
|
&mut SpanVisualizer { handler: &handler },
|
|
|
|
);
|
|
|
|
|
|
|
|
Err(())
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
let mut d = err.to_diagnostics(&handler);
|
|
|
|
d.note(&format!("current token = {}", parser.dump_cur()));
|
|
|
|
|
|
|
|
d.emit();
|
|
|
|
|
|
|
|
panic!();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.unwrap_err();
|
|
|
|
|
|
|
|
output
|
|
|
|
.compare_to_file(&dir.join("span.rust-debug"))
|
|
|
|
.unwrap();
|
|
|
|
}
|
|
|
|
|
2021-08-19 08:16:32 +03:00
|
|
|
#[testing::fixture("tests/errors/**/input.css")]
|
|
|
|
fn fail(input: PathBuf) {
|
|
|
|
eprintln!("Input: {}", input.display());
|
|
|
|
let stderr_path = input.parent().unwrap().join("output.stderr");
|
|
|
|
|
|
|
|
let stderr = testing::run_test2(false, |cm, handler| -> Result<(), _> {
|
|
|
|
let fm = cm.load_file(&input).unwrap();
|
|
|
|
let lexer = Lexer::new(SourceFileInput::from(&*fm));
|
|
|
|
let mut parser = Parser::new(lexer, ParserConfig { parse_values: true });
|
|
|
|
|
|
|
|
let stylesheet = parser.parse_all();
|
|
|
|
|
|
|
|
match stylesheet {
|
|
|
|
Ok(..) => {}
|
|
|
|
Err(err) => {
|
|
|
|
err.to_diagnostics(&handler).emit();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for err in parser.take_errors() {
|
|
|
|
err.to_diagnostics(&handler).emit();
|
|
|
|
}
|
|
|
|
|
|
|
|
if !handler.has_errors() {
|
|
|
|
panic!("should error")
|
|
|
|
}
|
|
|
|
|
|
|
|
Err(())
|
|
|
|
})
|
|
|
|
.unwrap_err();
|
|
|
|
|
|
|
|
stderr.compare_to_file(&stderr_path).unwrap();
|
|
|
|
}
|