LibTest: Add support for xfail JavaScriptTestRunner tests

This is meant to be used in a similar manner to skipping tests, with the
extra advantage that if the test begins passing unexpectedly, the test
will fail.

Being notified of unexpected passes allows for the test to be updated to
the correct expectation.
This commit is contained in:
Shannon Booth 2023-07-22 19:39:31 +12:00 committed by Andreas Kling
parent 2c06ad3a05
commit af60c740e3
Notes: sideshowbarker 2024-07-16 23:05:02 +09:00
4 changed files with 32 additions and 4 deletions

View File

@@ -3,6 +3,7 @@
* Copyright (c) 2020-2022, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
@@ -425,6 +426,11 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path)
auto details = test_value.as_object().get_deprecated_string("details"sv);
VERIFY(result.has_value());
test.details = details.value();
} else if (result_string == "xfail") {
test.result = Test::Result::ExpectedFail;
m_counts.tests_expected_failed++;
if (suite.most_severe_test_result != Test::Result::Fail)
suite.most_severe_test_result = Test::Result::ExpectedFail;
} else {
test.result = Test::Result::Skip;
if (suite.most_severe_test_result == Test::Result::Pass)
@@ -443,6 +449,8 @@ inline JSFileResult TestRunner::run_file_test(DeprecatedString const& test_path)
} else {
if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
file_result.most_severe_test_result = Test::Result::Skip;
else if (suite.most_severe_test_result == Test::Result::ExpectedFail && (file_result.most_severe_test_result == Test::Result::Pass || file_result.most_severe_test_result == Test::Result::Skip))
file_result.most_severe_test_result = Test::Result::ExpectedFail;
m_counts.suites_passed++;
}
@@ -605,6 +613,9 @@ inline void TestRunner::print_file_result(JSFileResult const& file_result) const
print_modifiers({ CLEAR, FG_RED });
outln("{} (failed):", test.name);
outln(" {}", test.details);
} else if (test.result == Test::Result::ExpectedFail) {
print_modifiers({ CLEAR, FG_ORANGE });
outln("{} (expected fail)", test.name);
} else {
print_modifiers({ CLEAR, FG_ORANGE });
outln("{} (skipped)", test.name);

View File

@@ -2,6 +2,7 @@
* Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org>
* Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Brian Gianforcaro <bgianf@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
@@ -17,6 +18,7 @@ enum class Result {
Pass,
Fail,
Skip,
ExpectedFail,
Crashed,
};
@@ -42,6 +44,7 @@ struct Counts {
unsigned tests_failed { 0 };
unsigned tests_passed { 0 };
unsigned tests_skipped { 0 };
unsigned tests_expected_failed { 0 };
unsigned suites_failed { 0 };
unsigned suites_passed { 0 };
unsigned files_total { 0 };

View File

@@ -3,6 +3,7 @@
* Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
* Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
* Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
@@ -184,12 +185,17 @@ inline void TestRunner::print_test_results() const
out("{} skipped, ", m_counts.tests_skipped);
print_modifiers({ CLEAR });
}
if (m_counts.tests_expected_failed) {
print_modifiers({ FG_ORANGE });
out("{} expected failed, ", m_counts.tests_expected_failed);
print_modifiers({ CLEAR });
}
if (m_counts.tests_passed) {
print_modifiers({ FG_GREEN });
out("{} passed, ", m_counts.tests_passed);
print_modifiers({ CLEAR });
}
outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed);
outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed + m_counts.tests_expected_failed);
outln("Files: {} total", m_counts.files_total);
@@ -227,6 +233,9 @@ inline void TestRunner::print_test_results_as_json() const
case Result::Skip:
result_name = "SKIPPED"sv;
break;
case Result::ExpectedFail:
result_name = "XFAIL"sv;
break;
case Result::Crashed:
result_name = "PROCESS_ERROR"sv;
break;
@@ -254,7 +263,8 @@ inline void TestRunner::print_test_results_as_json() const
tests.set("failed", m_counts.tests_failed);
tests.set("passed", m_counts.tests_passed);
tests.set("skipped", m_counts.tests_skipped);
tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped);
tests.set("xfail", m_counts.tests_expected_failed);
tests.set("total", m_counts.tests_failed + m_counts.tests_passed + m_counts.tests_skipped + m_counts.tests_expected_failed);
JsonObject results;
results.set("suites", suites);

View File

@@ -120,8 +120,11 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu
case Test::Result::Pass:
++m_counts.tests_passed;
break;
case Test::Result::ExpectedFail:
++m_counts.tests_expected_failed;
break;
case Test::Result::Skip:
++m_counts.tests_skipped;
break;
case Test::Result::Fail:
++m_counts.tests_failed;
@ -196,7 +199,8 @@ void TestRunner::do_run_single_test(DeprecatedString const& test_path, size_t cu
outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed");
} else {
print_modifiers({ Test::CLEAR, Test::FG_ORANGE });
outln("{} (skipped)", test_result.file_path.basename());
auto const status = test_result.result == Test::Result::Skip ? "skipped"sv : "expected fail"sv;
outln("{} ({})", test_result.file_path.basename(), status);
}
print_modifiers({ Test::CLEAR });
}