LibTest + test-js: Add initial skeleton of LibTest and migrate code there.

The test-js reporter is arguably the nicest test runner / reporter
that exists in the Serenity code base. Toward the goal of leveling up
all the other unit test environments, start a new LibTest library so
that we can share code and reporting utilities to make all the test
systems look and behave similarly.
Author: Brian Gianforcaro, 2021-02-28 13:09:13 -08:00
Committer: Andreas Kling
parent 0b5ca28475
commit 0a49877fdc
Notes: sideshowbarker 2024-07-18 21:49:20 +09:00
4 changed files with 90 additions and 50 deletions

View File

@@ -30,6 +30,7 @@ add_subdirectory(LibSymbolClient)
add_subdirectory(LibSyntax)
add_subdirectory(LibSystem)
add_subdirectory(LibTar)
+ add_subdirectory(LibTest)
add_subdirectory(LibTextCodec)
add_subdirectory(LibThread)
add_subdirectory(LibTLS)

View File

@@ -0,0 +1,2 @@
serenity_install_sources(Libraries/LibTest)

View File

@@ -0,0 +1,56 @@
/*
* Copyright (c) 2020, Matthew Olsson <matthewcolsson@gmail.com>
* Copyright (c) 2020, Linus Groh <mail@linusgroh.de>
* Copyright (c) 2021, Brian Gianforcaro <b.gianfo@gmail.com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#pragma once

#include <AK/String.h>
#include <AK/Vector.h>

namespace Test {

enum class Result {
    Pass,
    Fail,
    Skip,
};

struct Case {
    String name;
    Result result;
    String details;
};

struct Suite {
    String name;
    // A failed test takes precedence over a skipped test, which both have
    // precedence over a passed test
    Result most_severe_test_result { Result::Pass };
    Vector<Case> tests {};
};

}
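
The types above are plain aggregates, so each test runner remains responsible for keeping most_severe_test_result consistent with the cases it appends. As a minimal sketch of the intended usage (not code from this commit; record_case and make_example_suite are hypothetical helper names), the precedence rule documented in Suite could be applied like this:

#include <LibTest/Results.h>

// Hypothetical helper: fold one finished case into its suite, applying the
// documented precedence (Fail beats Skip, Skip beats Pass).
static void record_case(Test::Suite& suite, Test::Case const& test_case)
{
    if (test_case.result == Test::Result::Fail)
        suite.most_severe_test_result = Test::Result::Fail;
    else if (test_case.result == Test::Result::Skip && suite.most_severe_test_result == Test::Result::Pass)
        suite.most_severe_test_result = Test::Result::Skip;
    suite.tests.append(test_case);
}

// The structs are aggregate-initialized, just as test-js does below.
static Test::Suite make_example_suite()
{
    Test::Suite suite { "example suite" };
    record_case(suite, { "passing test", Test::Result::Pass, "" });
    record_case(suite, { "skipped test", Test::Result::Skip, "" });
    return suite; // most_severe_test_result is now Test::Result::Skip.
}

test-js performs this same folding inline while walking its per-file JSON results, as the hunks below show.
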

View File

@@ -40,6 +40,7 @@
#include <LibJS/Runtime/Array.h>
#include <LibJS/Runtime/GlobalObject.h>
#include <LibJS/Runtime/JSONObject.h>
+ #include <LibTest/Results.h>
#include <signal.h>
#include <stdlib.h>
#include <sys/time.h>
@@ -51,26 +52,6 @@ RefPtr<JS::VM> vm;
static bool collect_on_every_allocation = false;
static String currently_running_test;
- enum class TestResult {
-     Pass,
-     Fail,
-     Skip,
- };
- struct JSTest {
-     String name;
-     TestResult result;
-     String details;
- };
- struct JSSuite {
-     String name;
-     // A failed test takes precedence over a skipped test, which both have
-     // precedence over a passed test
-     TestResult most_severe_test_result { TestResult::Pass };
-     Vector<JSTest> tests {};
- };
struct ParserError {
JS::Parser::Error error;
String hint;
@@ -82,8 +63,8 @@ struct JSFileResult {
double time_taken { 0 };
// A failed test takes precedence over a skipped test, which both have
// precedence over a passed test
- TestResult most_severe_test_result { TestResult::Pass };
- Vector<JSSuite> suites {};
+ Test::Result most_severe_test_result { Test::Result::Pass };
+ Vector<Test::Suite> suites {};
Vector<String> logged_messages {};
};
@@ -332,12 +313,12 @@ JSFileResult TestRunner::run_file_test(const String& test_path)
}
test_json.value().as_object().for_each_member([&](const String& suite_name, const JsonValue& suite_value) {
- JSSuite suite { suite_name };
+ Test::Suite suite { suite_name };
VERIFY(suite_value.is_object());
suite_value.as_object().for_each_member([&](const String& test_name, const JsonValue& test_value) {
- JSTest test { test_name, TestResult::Fail, "" };
+ Test::Case test { test_name, Test::Result::Fail, "" };
VERIFY(test_value.is_object());
VERIFY(test_value.as_object().has("result"));
@@ -346,32 +327,32 @@ JSFileResult TestRunner::run_file_test(const String& test_path)
VERIFY(result.is_string());
auto result_string = result.as_string();
if (result_string == "pass") {
- test.result = TestResult::Pass;
+ test.result = Test::Result::Pass;
m_counts.tests_passed++;
} else if (result_string == "fail") {
- test.result = TestResult::Fail;
+ test.result = Test::Result::Fail;
m_counts.tests_failed++;
- suite.most_severe_test_result = TestResult::Fail;
+ suite.most_severe_test_result = Test::Result::Fail;
VERIFY(test_value.as_object().has("details"));
auto details = test_value.as_object().get("details");
VERIFY(result.is_string());
test.details = details.as_string();
} else {
- test.result = TestResult::Skip;
- if (suite.most_severe_test_result == TestResult::Pass)
- suite.most_severe_test_result = TestResult::Skip;
+ test.result = Test::Result::Skip;
+ if (suite.most_severe_test_result == Test::Result::Pass)
+ suite.most_severe_test_result = Test::Result::Skip;
m_counts.tests_skipped++;
}
suite.tests.append(test);
});
- if (suite.most_severe_test_result == TestResult::Fail) {
+ if (suite.most_severe_test_result == Test::Result::Fail) {
m_counts.suites_failed++;
- file_result.most_severe_test_result = TestResult::Fail;
+ file_result.most_severe_test_result = Test::Result::Fail;
} else {
- if (suite.most_severe_test_result == TestResult::Skip && file_result.most_severe_test_result == TestResult::Pass)
- file_result.most_severe_test_result = TestResult::Skip;
+ if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
+ file_result.most_severe_test_result = Test::Result::Skip;
m_counts.suites_passed++;
}
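
The hunk above applies the severity precedence at two levels: each parsed case updates its suite, and each finished suite then updates the file result. It may help to see the shape of the per-file results JSON that the nested for_each_member loops walk; the snippet below is illustrative only (the suite name, test names, and strings are invented, not taken from a real run):

// Assumed shape of the results JSON: suite name -> test name -> outcome object.
// "details" is only read for failing tests.
static constexpr auto example_results_json = R"json({
    "Math.abs": {
        "returns the absolute value": { "result": "pass" },
        "handles negative zero": { "result": "fail", "details": "expected 0, got -0" },
        "bigint input": { "result": "skip" }
    }
})json";

Note that the loop treats any "result" string other than "pass" or "fail" as a skip, so the exact "skip" value shown here is only an assumption about what the test harness emits.
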
@@ -433,12 +414,12 @@ static void print_modifiers(Vector<Modifier> modifiers)
void TestRunner::print_file_result(const JSFileResult& file_result) const
{
- if (file_result.most_severe_test_result == TestResult::Fail || file_result.error.has_value()) {
+ if (file_result.most_severe_test_result == Test::Result::Fail || file_result.error.has_value()) {
print_modifiers({ BG_RED, FG_BLACK, FG_BOLD });
out(" FAIL ");
print_modifiers({ CLEAR });
} else {
- if (m_print_times || file_result.most_severe_test_result != TestResult::Pass) {
+ if (m_print_times || file_result.most_severe_test_result != Test::Result::Pass) {
print_modifiers({ BG_GREEN, FG_BLACK, FG_BOLD });
out(" PASS ");
print_modifiers({ CLEAR });
@@ -495,12 +476,12 @@ void TestRunner::print_file_result(const JSFileResult& file_result) const
return;
}
- if (file_result.most_severe_test_result != TestResult::Pass) {
+ if (file_result.most_severe_test_result != Test::Result::Pass) {
for (auto& suite : file_result.suites) {
- if (suite.most_severe_test_result == TestResult::Pass)
+ if (suite.most_severe_test_result == Test::Result::Pass)
continue;
- bool failed = suite.most_severe_test_result == TestResult::Fail;
+ bool failed = suite.most_severe_test_result == Test::Result::Fail;
print_modifiers({ FG_GRAY, FG_BOLD });
@@ -530,12 +511,12 @@ void TestRunner::print_file_result(const JSFileResult& file_result) const
print_modifiers({ CLEAR });
for (auto& test : suite.tests) {
- if (test.result == TestResult::Pass)
+ if (test.result == Test::Result::Pass)
continue;
print_modifiers({ FG_GRAY, FG_BOLD });
out(" Test: ");
- if (test.result == TestResult::Fail) {
+ if (test.result == Test::Result::Fail) {
print_modifiers({ CLEAR, FG_RED });
outln("{} (failed):", test.name);
outln(" {}", test.details);
@@ -630,25 +611,25 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
auto start_time = get_time_in_ms();
String details = "";
- TestResult test_result;
+ Test::Result test_result;
if (test_path.ends_with(".module.js")) {
- test_result = TestResult::Skip;
+ test_result = Test::Result::Skip;
m_counts.tests_skipped++;
m_counts.suites_passed++;
} else {
auto parse_result = parse_file(test_path);
if (expecting_file_to_parse) {
if (!parse_result.is_error()) {
- test_result = TestResult::Pass;
+ test_result = Test::Result::Pass;
} else {
- test_result = TestResult::Fail;
+ test_result = Test::Result::Fail;
details = parse_result.error().error.to_string();
}
} else {
if (parse_result.is_error()) {
- test_result = TestResult::Pass;
+ test_result = Test::Result::Pass;
} else {
- test_result = TestResult::Fail;
+ test_result = Test::Result::Fail;
details = "File was expected to produce a parser error but didn't";
}
}
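
Apart from the early skip for .module.js files, the decision above reduces to checking whether the parse outcome matches the expectation. A compact restatement (a sketch only; classify_parse_outcome is a hypothetical name, not something this commit introduces):

#include <LibTest/Results.h>

// Pass exactly when the outcome matches the expectation; the caller still
// fills in the human-readable details for the mismatching cases.
static Test::Result classify_parse_outcome(bool expecting_file_to_parse, bool parsed_successfully)
{
    return expecting_file_to_parse == parsed_successfully ? Test::Result::Pass : Test::Result::Fail;
}

With such a helper the branches above would collapse to test_result = classify_parse_outcome(expecting_file_to_parse, !parse_result.is_error()), with the module-file skip handled first.
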
@@ -661,8 +642,8 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
// - That way we can show additional information such as "file parsed but shouldn't have" or
// parser errors for files that should parse respectively
- JSTest test { expecting_file_to_parse ? "file should parse" : "file should not parse", test_result, details };
- JSSuite suite { "Parse file", test_result, { test } };
+ Test::Case test { expecting_file_to_parse ? "file should parse" : "file should not parse", test_result, details };
+ Test::Suite suite { "Parse file", test_result, { test } };
JSFileResult file_result {
test_path.substring(m_test_root.length() + 1, test_path.length() - m_test_root.length() - 1),
{},
@@ -671,7 +652,7 @@ JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
{ suite }
};
- if (test_result == TestResult::Fail) {
+ if (test_result == Test::Result::Fail) {
m_counts.tests_failed++;
m_counts.suites_failed++;
} else {