ladybird/Userland/Libraries/LibTest/TestSuite.cpp
Andrew Kaster 35c0a6c54d AK+Userland: Move AK/TestSuite.h into LibTest and rework Tests' CMake
As many macros as possible are moved to Macros.h, while the macros to
create a test case are moved to TestCase.h. TestCase.h is now the only
user-facing header for creating a test case. TestSuite and its helpers
have moved into a .cpp file. Instead of requiring a TEST_MAIN macro to
be instantiated in the test file, a TestMain.cpp file is provided that
will be linked against each test. This has the side effect that, if we
want to split test cases across multiple files, it's as simple as
adding them all to the same executable.
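
For illustration, the shared TestMain.cpp boilerplate could look
roughly like this; it is a sketch inferred from the TestSuite API in
the file below, not the verbatim contents of that file:

    #include <LibTest/TestSuite.h>

    int main(int argc, char** argv)
    {
        // Forward to the global TestSuite singleton. Test cases are
        // expected to have registered themselves already via the
        // machinery in TestCase.h.
        return ::Test::TestSuite::the().main(argv[0], argc, argv);
    }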

The test main should be portable to kernel mode as well, so if there's
a set of tests that should be run in self-test mode in kernel space, we
can accommodate that.

A new serenity_test CMake function streamlines adding a new test. It
takes the test source file, the subdirectory under /usr/Tests where the
test application should be installed, and an optional list of libraries
to link the test application against. To accommodate future tests where
the provided TestMain.cpp is not suitable (e.g. test-js), a CUSTOM_MAIN
parameter can be passed to the function to avoid linking against the
boilerplate main function.
2021-04-25 09:36:49 +02:00

/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 * Copyright (c) 2021, Andrew Kaster <akaster@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibTest/Macros.h> // intentionally first -- we redefine VERIFY and friends in here

#include <LibCore/ArgsParser.h>
#include <LibTest/TestSuite.h>

#include <stdlib.h>
#include <sys/time.h>

namespace Test {

TestSuite* TestSuite::s_global = nullptr;
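
// Small wall-clock timer (gettimeofday-based) used to time individual cases
// and the run as a whole.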
class TestElapsedTimer {
public:
    TestElapsedTimer() { restart(); }

    void restart() { gettimeofday(&m_started, nullptr); }

    u64 elapsed_milliseconds()
    {
        struct timeval now = {};
        gettimeofday(&now, nullptr);

        struct timeval delta = {};
        timersub(&now, &m_started, &delta);

        return delta.tv_sec * 1000 + delta.tv_usec / 1000;
    }

private:
    struct timeval m_started = {};
};

// Declared in Macros.h: called by the assertion macros (VERIFY and friends)
// when a check fails, so the currently running case is marked as failed.
void current_test_case_did_fail()
{
    TestSuite::the().current_test_case_did_fail();
}

// Declared in TestCase.h: called by the test-case registration helpers to add
// a newly constructed case to the global suite.
void add_test_case_to_suite(const NonnullRefPtr<TestCase>& test_case)
{
    TestSuite::the().add_case(test_case);
}
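
// Parses the command-line options (--tests, --bench, --list, and an optional
// name pattern), selects the matching cases, and either lists or runs them.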
int TestSuite::main(const String& suite_name, int argc, char** argv)
{
    m_suite_name = suite_name;

    Core::ArgsParser args_parser;

    bool do_tests_only = getenv("TESTS_ONLY") != nullptr;
    bool do_benchmarks_only = false;
    bool do_list_cases = false;
    const char* search_string = "*";

    args_parser.add_option(do_tests_only, "Only run tests.", "tests", 0);
    args_parser.add_option(do_benchmarks_only, "Only run benchmarks.", "bench", 0);
    args_parser.add_option(do_list_cases, "List available test cases.", "list", 0);
    args_parser.add_positional_argument(search_string, "Only run matching cases.", "pattern", Core::ArgsParser::Required::No);
    args_parser.parse(argc, argv);

    const auto& matching_tests = find_cases(search_string, !do_benchmarks_only, !do_tests_only);

    if (do_list_cases) {
        outln("Available cases for {}:", suite_name);
        for (const auto& test : matching_tests) {
            outln(" {}", test.name());
        }
        return 0;
    }

    outln("Running {} cases out of {}.", matching_tests.size(), m_cases.size());

    return run(matching_tests);
}
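
// Returns every registered case whose name matches the (case-insensitive)
// glob in `search`, optionally filtering out tests or benchmarks.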
NonnullRefPtrVector<TestCase> TestSuite::find_cases(const String& search, bool find_tests, bool find_benchmarks)
{
    NonnullRefPtrVector<TestCase> matches;
    for (const auto& t : m_cases) {
        if (!search.is_empty() && !t.name().matches(search, CaseSensitivity::CaseInsensitive)) {
            continue;
        }

        if (!find_tests && !t.is_benchmark()) {
            continue;
        }
        if (!find_benchmarks && t.is_benchmark()) {
            continue;
        }

        matches.append(t);
    }
    return matches;
}
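
// Runs each selected case, timing it individually and accumulating separate
// totals for tests and benchmarks; returns the number of failed cases.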
int TestSuite::run(const NonnullRefPtrVector<TestCase>& tests)
{
    size_t test_count = 0;
    size_t test_failed_count = 0;
    size_t benchmark_count = 0;
    TestElapsedTimer global_timer;

    for (const auto& t : tests) {
        const auto test_type = t.is_benchmark() ? "benchmark" : "test";

        warnln("Running {} '{}'.", test_type, t.name());
        m_current_test_case_passed = true;

        TestElapsedTimer timer;
        t.func()();
        const auto time = timer.elapsed_milliseconds();

        dbgln("{} {} '{}' in {}ms", m_current_test_case_passed ? "Completed" : "Failed", test_type, t.name(), time);

        if (t.is_benchmark()) {
            m_benchtime += time;
            benchmark_count++;
        } else {
            m_testtime += time;
            test_count++;
        }

        if (!m_current_test_case_passed) {
            test_failed_count++;
        }
    }

    dbgln("Finished {} tests and {} benchmarks in {}ms ({}ms tests, {}ms benchmarks, {}ms other).",
        test_count,
        benchmark_count,
        global_timer.elapsed_milliseconds(),
        m_testtime,
        m_benchtime,
        global_timer.elapsed_milliseconds() - (m_testtime + m_benchtime));

    dbgln("Out of {} tests, {} passed and {} failed.", test_count, test_count - test_failed_count, test_failed_count);

    return (int)test_failed_count;
}

}