author | Ali Mohammad Pur <ali.mpfard@gmail.com> | 2021-05-18 18:45:12 +0430
committer | Linus Groh <mail@linusgroh.de> | 2021-05-18 18:48:15 +0100
commit | f137c1bfaac5714197dcba58c0f1f08d2761d133 (patch)
tree | 00ce1a3cd5d6f690e65b595863e1828fc4818c69
parent | b6e5c76427f88358390582b2def2713e05cde1d6 (diff)
LibJS+LibTest: Move out the test-js test runner into LibTest
-rw-r--r-- | Meta/CMake/utils.cmake | 23
-rw-r--r-- | Meta/Lagom/CMakeLists.txt | 4
-rw-r--r-- | Tests/LibJS/CMakeLists.txt | 3
-rw-r--r-- | Tests/LibJS/test-js.cpp | 732
-rw-r--r-- | Userland/Libraries/LibTest/JavaScriptTestRunner.h | 591
-rw-r--r-- | Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp | 156

6 files changed, 776 insertions, 733 deletions
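
Before the diff itself, a rough sketch of what the new CMake entry point makes possible. The `serenity_testjs_test()` helper introduced in `Meta/CMake/utils.cmake` below wraps `serenity_test()`, defaulting the `main()` translation unit to `LibTest/JavaScriptTestRunnerMain.cpp` and always linking LibJS and LibCore. A hypothetical consumer (the `test-web` target and `LibWeb` library are illustrative assumptions, not part of this commit) would declare its runner in one line:

```cmake
# Hypothetical usage sketch of the helper added in this commit.
# Builds a `test-web` binary from test-web.cpp; JavaScriptTestRunnerMain.cpp
# is prepended as the main() and LibJS + LibCore are linked automatically.
serenity_testjs_test(test-web.cpp test-web LIBS LibWeb)

# CUSTOM_MAIN swaps in a different main() translation unit if a suite needs one:
# serenity_testjs_test(test-web.cpp test-web CUSTOM_MAIN my-main.cpp LIBS LibWeb)
```

This is exactly how `Tests/LibJS/CMakeLists.txt` shrinks to a single `serenity_testjs_test(test-js.cpp test-js)` call in the diff below.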
diff --git a/Meta/CMake/utils.cmake b/Meta/CMake/utils.cmake
index a0c170e5c7..6cadb1879e 100644
--- a/Meta/CMake/utils.cmake
+++ b/Meta/CMake/utils.cmake
@@ -70,10 +70,14 @@ function(serenity_bin target_name)
 endfunction()
 
 function(serenity_test test_src sub_dir)
-    cmake_parse_arguments(SERENITY_TEST "CUSTOM_MAIN" "" "LIBS" ${ARGN})
+    cmake_parse_arguments(SERENITY_TEST "MAIN_ALREADY_DEFINED" "CUSTOM_MAIN" "LIBS" ${ARGN})
     set(TEST_SOURCES ${test_src})
-    if (NOT ${SERENITY_TEST_CUSTOM_MAIN})
-        list(APPEND TEST_SOURCES "${CMAKE_SOURCE_DIR}/Userland/Libraries/LibTest/TestMain.cpp")
+    if ("${SERENITY_TEST_CUSTOM_MAIN}" STREQUAL "")
+        set(SERENITY_TEST_CUSTOM_MAIN
+            "${CMAKE_SOURCE_DIR}/Userland/Libraries/LibTest/TestMain.cpp")
+    endif()
+    if (NOT ${SERENITY_TEST_MAIN_ALREADY_DEFINED})
+        list(PREPEND TEST_SOURCES "${SERENITY_TEST_CUSTOM_MAIN}")
     endif()
     get_filename_component(test_name ${test_src} NAME_WE)
     add_executable(${test_name} ${TEST_SOURCES})
@@ -84,6 +88,19 @@ function(serenity_test test_src sub_dir)
     install(TARGETS ${test_name} RUNTIME DESTINATION usr/Tests/${sub_dir})
 endfunction()
 
+function(serenity_testjs_test test_src sub_dir)
+    cmake_parse_arguments(SERENITY_TEST "" "CUSTOM_MAIN" "LIBS" ${ARGN})
+    if ("${SERENITY_TEST_CUSTOM_MAIN}" STREQUAL "")
+        set(SERENITY_TEST_CUSTOM_MAIN
+            "${CMAKE_SOURCE_DIR}/Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp")
+    endif()
+    list(APPEND SERENITY_TEST_LIBS LibJS LibCore)
+    serenity_test(${test_src} ${sub_dir}
+        CUSTOM_MAIN "${SERENITY_TEST_CUSTOM_MAIN}"
+        LIBS ${SERENITY_TEST_LIBS})
+endfunction()
+
 function(serenity_app target_name)
     cmake_parse_arguments(SERENITY_APP "" "ICON" "" ${ARGN})
diff --git a/Meta/Lagom/CMakeLists.txt b/Meta/Lagom/CMakeLists.txt
index 52fb4346aa..b071468b2f 100644
--- a/Meta/Lagom/CMakeLists.txt
+++ b/Meta/Lagom/CMakeLists.txt
@@ -132,7 +132,9 @@ if (BUILD_LAGOM)
     set_target_properties(ntpquery_lagom PROPERTIES OUTPUT_NAME ntpquery)
     target_link_libraries(ntpquery_lagom Lagom)
 
-    add_executable(test-js_lagom ../../Tests/LibJS/test-js.cpp)
+    add_executable(test-js_lagom
+        ../../Tests/LibJS/test-js.cpp
+        ../../Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp)
     set_target_properties(test-js_lagom PROPERTIES OUTPUT_NAME test-js)
     target_link_libraries(test-js_lagom Lagom)
     target_link_libraries(test-js_lagom stdc++)
diff --git a/Tests/LibJS/CMakeLists.txt b/Tests/LibJS/CMakeLists.txt
index 14a5c6d031..8d425f8650 100644
--- a/Tests/LibJS/CMakeLists.txt
+++ b/Tests/LibJS/CMakeLists.txt
@@ -1,3 +1,2 @@
-add_executable(test-js test-js.cpp)
-target_link_libraries(test-js LibJS LibLine LibCore)
+serenity_testjs_test(test-js.cpp test-js)
 install(TARGETS test-js RUNTIME DESTINATION bin)
diff --git a/Tests/LibJS/test-js.cpp b/Tests/LibJS/test-js.cpp
index 095e927584..129c2149e4 100644
--- a/Tests/LibJS/test-js.cpp
+++ b/Tests/LibJS/test-js.cpp
@@ -5,135 +5,16 @@
  * SPDX-License-Identifier: BSD-2-Clause
  */
 
-#include <AK/ByteBuffer.h>
-#include <AK/JsonObject.h>
-#include <AK/JsonValue.h>
-#include <AK/LexicalPath.h>
-#include <AK/QuickSort.h>
-#include <LibCore/ArgsParser.h>
-#include <LibCore/DirIterator.h>
-#include <LibCore/File.h>
-#include <LibJS/Interpreter.h>
-#include <LibJS/Lexer.h>
-#include <LibJS/Parser.h>
-#include <LibJS/Runtime/Array.h>
-#include <LibJS/Runtime/GlobalObject.h>
-#include <LibJS/Runtime/JSONObject.h>
-#include <LibTest/Results.h>
-#include <signal.h>
-#include <stdlib.h>
-#include <sys/time.h>
-#include <unistd.h>
+#include <LibTest/JavaScriptTestRunner.h>
 
-#define TOP_LEVEL_TEST_NAME "__$$TOP_LEVEL$$__"
+TEST_ROOT("Userland/Libraries/LibJS/Tests");
 
-RefPtr<JS::VM> vm;
-
-static bool collect_on_every_allocation = false;
-static String currently_running_test;
-
-struct ParserError {
-    JS::Parser::Error error;
-    String hint;
-};
-
-struct JSFileResult {
-    String name;
-    Optional<ParserError> error {};
-    double time_taken { 0 };
-    // A failed test takes precedence over a skipped test, which both have
-    // precedence over a passed test
-    Test::Result most_severe_test_result { Test::Result::Pass };
-    Vector<Test::Suite> suites {};
-    Vector<String> logged_messages {};
-};
-
-class TestRunnerGlobalObject final : public JS::GlobalObject {
-    JS_OBJECT(TestRunnerGlobalObject, JS::GlobalObject);
-
-public:
-    TestRunnerGlobalObject();
-    virtual ~TestRunnerGlobalObject() override;
-
-    virtual void initialize_global_object() override;
-
-private:
-    JS_DECLARE_NATIVE_FUNCTION(is_strict_mode);
-    JS_DECLARE_NATIVE_FUNCTION(can_parse_source);
-    JS_DECLARE_NATIVE_FUNCTION(run_queued_promise_jobs);
-};
-
-class TestRunner {
-public:
-    static TestRunner* the()
-    {
-        return s_the;
-    }
-
-    TestRunner(String test_root, bool print_times, bool print_progress)
-        : m_test_root(move(test_root))
-        , m_print_times(print_times)
-        , m_print_progress(print_progress)
-    {
-        VERIFY(!s_the);
-        s_the = this;
-    }
-
-    virtual ~TestRunner() { s_the = nullptr; };
-
-    Test::Counts run();
-
-    const Test::Counts& counts() const { return m_counts; }
-
-    bool is_printing_progress() const { return m_print_progress; }
-
-protected:
-    static TestRunner* s_the;
-
-    virtual Vector<String> get_test_paths() const;
-    virtual JSFileResult run_file_test(const String& test_path);
-    void print_file_result(const JSFileResult& file_result) const;
-    void print_test_results() const;
-
-    String m_test_root;
-    bool m_print_times;
-    bool m_print_progress;
-
-    double m_total_elapsed_time_in_ms { 0 };
-    Test::Counts m_counts;
-
-    RefPtr<JS::Program> m_test_program;
-};
-
-TestRunner* TestRunner::s_the = nullptr;
-
-TestRunnerGlobalObject::TestRunnerGlobalObject()
-{
-}
-
-TestRunnerGlobalObject::~TestRunnerGlobalObject()
-{
-}
-
-void TestRunnerGlobalObject::initialize_global_object()
-{
-    Base::initialize_global_object();
-    static FlyString global_property_name { "global" };
-    static FlyString is_strict_mode_property_name { "isStrictMode" };
-    static FlyString can_parse_source_property_name { "canParseSource" };
-    static FlyString run_queued_promise_jobs_property_name { "runQueuedPromiseJobs" };
-    define_property(global_property_name, this, JS::Attribute::Enumerable);
-    define_native_function(is_strict_mode_property_name, is_strict_mode);
-    define_native_function(can_parse_source_property_name, can_parse_source);
-    define_native_function(run_queued_promise_jobs_property_name, run_queued_promise_jobs);
-}
-
-JS_DEFINE_NATIVE_FUNCTION(TestRunnerGlobalObject::is_strict_mode)
+TESTJS_GLOBAL_FUNCTION(is_strict_mode, isStrictMode, 0)
 {
     return JS::Value(vm.in_strict_mode());
 }
 
-JS_DEFINE_NATIVE_FUNCTION(TestRunnerGlobalObject::can_parse_source)
+TESTJS_GLOBAL_FUNCTION(can_parse_source, canParseSource)
 {
     auto source = vm.argument(0).to_string(global_object);
     if (vm.exception())
@@ -143,611 +24,8 @@ JS_DEFINE_NATIVE_FUNCTION(TestRunnerGlobalObject::can_parse_source)
     return JS::Value(!parser.has_errors());
 }
 
-JS_DEFINE_NATIVE_FUNCTION(TestRunnerGlobalObject::run_queued_promise_jobs)
+TESTJS_GLOBAL_FUNCTION(run_queued_promise_jobs, runQueuedPromiseJobs)
 {
     vm.run_queued_promise_jobs();
     return JS::js_undefined();
 }
-
-static void cleanup_and_exit()
-{
-    // Clear the taskbar progress.
-    if (TestRunner::the() && TestRunner::the()->is_printing_progress())
-        warn("\033]9;-1;\033\\");
-    exit(1);
-}
-
-static void handle_sigabrt(int)
-{
-    dbgln("test-js: SIGABRT received, cleaning up.");
-    cleanup_and_exit();
-}
-
-static double get_time_in_ms()
-{
-    struct timeval tv1;
-    auto return_code = gettimeofday(&tv1, nullptr);
-    VERIFY(return_code >= 0);
-    return static_cast<double>(tv1.tv_sec) * 1000.0 + static_cast<double>(tv1.tv_usec) / 1000.0;
-}
-
-template<typename Callback>
-static void iterate_directory_recursively(const String& directory_path, Callback callback)
-{
-    Core::DirIterator directory_iterator(directory_path, Core::DirIterator::Flags::SkipDots);
-
-    while (directory_iterator.has_next()) {
-        auto file_path = directory_iterator.next_full_path();
-        if (Core::File::is_directory(file_path)) {
-            iterate_directory_recursively(file_path, callback);
-        } else {
-            callback(move(file_path));
-        }
-    }
-}
-
-Vector<String> TestRunner::get_test_paths() const
-{
-    Vector<String> paths;
-    iterate_directory_recursively(m_test_root, [&](const String& file_path) {
-        if (!file_path.ends_with("test-common.js"))
-            paths.append(file_path);
-    });
-    quick_sort(paths);
-    return paths;
-}
-
-Test::Counts TestRunner::run()
-{
-    size_t progress_counter = 0;
-    auto test_paths = get_test_paths();
-    for (auto& path : test_paths) {
-        ++progress_counter;
-        print_file_result(run_file_test(path));
-        if (m_print_progress)
-            warn("\033]9;{};{};\033\\", progress_counter, test_paths.size());
-    }
-
-    if (m_print_progress)
-        warn("\033]9;-1;\033\\");
-
-    print_test_results();
-
-    return m_counts;
-}
-
-static Result<NonnullRefPtr<JS::Program>, ParserError> parse_file(const String& file_path)
-{
-    auto file = Core::File::construct(file_path);
-    auto result = file->open(Core::OpenMode::ReadOnly);
-    if (!result) {
-        warnln("Failed to open the following file: \"{}\"", file_path);
-        cleanup_and_exit();
-    }
-
-    auto contents = file->read_all();
-    String test_file_string(reinterpret_cast<const char*>(contents.data()), contents.size());
-    file->close();
-
-    auto parser = JS::Parser(JS::Lexer(test_file_string));
-    auto program = parser.parse_program();
-
-    if (parser.has_errors()) {
-        auto error = parser.errors()[0];
-        return Result<NonnullRefPtr<JS::Program>, ParserError>(ParserError { error, error.source_location_hint(test_file_string) });
-    }
-
-    return Result<NonnullRefPtr<JS::Program>, ParserError>(program);
-}
-
-static Optional<JsonValue> get_test_results(JS::Interpreter& interpreter)
-{
-    auto result = vm->get_variable("__TestResults__", interpreter.global_object());
-    auto json_string = JS::JSONObject::stringify_impl(interpreter.global_object(), result, JS::js_undefined(), JS::js_undefined());
-
-    auto json = JsonValue::from_string(json_string);
-    if (!json.has_value())
-        return {};
-
-    return json.value();
-}
-
-JSFileResult TestRunner::run_file_test(const String& test_path)
-{
-    currently_running_test = test_path;
-
-    double start_time = get_time_in_ms();
-    auto interpreter = JS::Interpreter::create<TestRunnerGlobalObject>(*vm);
-
-    // FIXME: This is a hack while we're refactoring Interpreter/VM stuff.
-    JS::VM::InterpreterExecutionScope scope(*interpreter);
-
-    interpreter->heap().set_should_collect_on_every_allocation(collect_on_every_allocation);
-
-    if (!m_test_program) {
-        auto result = parse_file(String::formatted("{}/test-common.js", m_test_root));
-        if (result.is_error()) {
-            warnln("Unable to parse test-common.js");
-            warnln("{}", result.error().error.to_string());
-            warnln("{}", result.error().hint);
-            cleanup_and_exit();
-        }
-        m_test_program = result.value();
-    }
-
-    interpreter->run(interpreter->global_object(), *m_test_program);
-
-    auto file_program = parse_file(test_path);
-    if (file_program.is_error())
-        return { test_path, file_program.error() };
-    interpreter->run(interpreter->global_object(), *file_program.value());
-
-    auto test_json = get_test_results(*interpreter);
-    if (!test_json.has_value()) {
-        warnln("Received malformed JSON from test \"{}\"", test_path);
-        cleanup_and_exit();
-    }
-
-    JSFileResult file_result { test_path.substring(m_test_root.length() + 1, test_path.length() - m_test_root.length() - 1) };
-
-    // Collect logged messages
-    auto& arr = interpreter->vm().get_variable("__UserOutput__", interpreter->global_object()).as_array();
-    for (auto& entry : arr.indexed_properties()) {
-        auto message = entry.value_and_attributes(&interpreter->global_object()).value;
-        file_result.logged_messages.append(message.to_string_without_side_effects());
-    }
-
-    test_json.value().as_object().for_each_member([&](const String& suite_name, const JsonValue& suite_value) {
-        Test::Suite suite { suite_name };
-
-        VERIFY(suite_value.is_object());
-
-        suite_value.as_object().for_each_member([&](const String& test_name, const JsonValue& test_value) {
-            Test::Case test { test_name, Test::Result::Fail, "" };
-
-            VERIFY(test_value.is_object());
-            VERIFY(test_value.as_object().has("result"));
-
-            auto result = test_value.as_object().get("result");
-            VERIFY(result.is_string());
-            auto result_string = result.as_string();
-            if (result_string == "pass") {
-                test.result = Test::Result::Pass;
-                m_counts.tests_passed++;
-            } else if (result_string == "fail") {
-                test.result = Test::Result::Fail;
-                m_counts.tests_failed++;
-                suite.most_severe_test_result = Test::Result::Fail;
-                VERIFY(test_value.as_object().has("details"));
-                auto details = test_value.as_object().get("details");
-                VERIFY(result.is_string());
-                test.details = details.as_string();
-            } else {
-                test.result = Test::Result::Skip;
-                if (suite.most_severe_test_result == Test::Result::Pass)
-                    suite.most_severe_test_result = Test::Result::Skip;
-                m_counts.tests_skipped++;
-            }
-
-            suite.tests.append(test);
-        });
-
-        if (suite.most_severe_test_result == Test::Result::Fail) {
-            m_counts.suites_failed++;
-            file_result.most_severe_test_result = Test::Result::Fail;
-        } else {
-            if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
-                file_result.most_severe_test_result = Test::Result::Skip;
-            m_counts.suites_passed++;
-        }
-
-        file_result.suites.append(suite);
-    });
-
-    m_counts.files_total++;
-
-    file_result.time_taken = get_time_in_ms() - start_time;
-    m_total_elapsed_time_in_ms += file_result.time_taken;
-
-    return file_result;
-}
-
-enum Modifier {
-    BG_RED,
-    BG_GREEN,
-    FG_RED,
-    FG_GREEN,
-    FG_ORANGE,
-    FG_GRAY,
-    FG_BLACK,
-    FG_BOLD,
-    ITALIC,
-    CLEAR,
-};
-
-static void print_modifiers(Vector<Modifier> modifiers)
-{
-    for (auto& modifier : modifiers) {
-        auto code = [&] {
-            switch (modifier) {
-            case BG_RED:
-                return "\033[48;2;255;0;102m";
-            case BG_GREEN:
-                return "\033[48;2;102;255;0m";
-            case FG_RED:
-                return "\033[38;2;255;0;102m";
-            case FG_GREEN:
-                return "\033[38;2;102;255;0m";
-            case FG_ORANGE:
-                return "\033[38;2;255;102;0m";
-            case FG_GRAY:
-                return "\033[38;2;135;139;148m";
-            case FG_BLACK:
-                return "\033[30m";
-            case FG_BOLD:
-                return "\033[1m";
-            case ITALIC:
-                return "\033[3m";
-            case CLEAR:
-                return "\033[0m";
-            }
-            VERIFY_NOT_REACHED();
-        }();
-        out("{}", code);
-    }
-}
-
-void TestRunner::print_file_result(const JSFileResult& file_result) const
-{
-    if (file_result.most_severe_test_result == Test::Result::Fail || file_result.error.has_value()) {
-        print_modifiers({ BG_RED, FG_BLACK, FG_BOLD });
-        out(" FAIL ");
-        print_modifiers({ CLEAR });
-    } else {
-        if (m_print_times || file_result.most_severe_test_result != Test::Result::Pass) {
-            print_modifiers({ BG_GREEN, FG_BLACK, FG_BOLD });
-            out(" PASS ");
-            print_modifiers({ CLEAR });
-        } else {
-            return;
-        }
-    }
-
-    out(" {}", file_result.name);
-
-    if (m_print_times) {
-        print_modifiers({ CLEAR, ITALIC, FG_GRAY });
-        if (file_result.time_taken < 1000) {
-            outln(" ({}ms)", static_cast<int>(file_result.time_taken));
-        } else {
-            outln(" ({:3}s)", file_result.time_taken / 1000.0);
-        }
-        print_modifiers({ CLEAR });
-    } else {
-        outln();
-    }
-
-    if (!file_result.logged_messages.is_empty()) {
-        print_modifiers({ FG_GRAY, FG_BOLD });
-#ifdef __serenity__
-        outln("     ⚠ Console output:");
-#else
-        // This emoji has a second invisible byte after it. The one above does not
-        outln("    ⚠️  Console output:");
-#endif
-        print_modifiers({ CLEAR, FG_GRAY });
-        for (auto& message : file_result.logged_messages)
-            outln("         {}", message);
-    }
-
-    if (file_result.error.has_value()) {
-        auto test_error = file_result.error.value();
-
-        print_modifiers({ FG_RED });
-#ifdef __serenity__
-        outln("     ❌ The file failed to parse");
-#else
-        // No invisible byte here, but the spacing still needs to be altered on the host
-        outln("    ❌ The file failed to parse");
-#endif
-        outln();
-        print_modifiers({ FG_GRAY });
-        for (auto& message : test_error.hint.split('\n', true)) {
-            outln("         {}", message);
-        }
-        print_modifiers({ FG_RED });
-        outln("         {}", test_error.error.to_string());
-        outln();
-        return;
-    }
-
-    if (file_result.most_severe_test_result != Test::Result::Pass) {
-        for (auto& suite : file_result.suites) {
-            if (suite.most_severe_test_result == Test::Result::Pass)
-                continue;
-
-            bool failed = suite.most_severe_test_result == Test::Result::Fail;
-
-            print_modifiers({ FG_GRAY, FG_BOLD });
-
-            if (failed) {
-#ifdef __serenity__
-                out("     ❌ Suite:    ");
-#else
-                // No invisible byte here, but the spacing still needs to be altered on the host
-                out("    ❌ Suite:    ");
-#endif
-            } else {
-#ifdef __serenity__
-                out("     ⚠ Suite:    ");
-#else
-                // This emoji has a second invisible byte after it. The one above does not
-                out("    ⚠️  Suite:    ");
-#endif
-            }
-
-            print_modifiers({ CLEAR, FG_GRAY });
-
-            if (suite.name == TOP_LEVEL_TEST_NAME) {
-                outln("<top-level>");
-            } else {
-                outln("{}", suite.name);
-            }
-            print_modifiers({ CLEAR });
-
-            for (auto& test : suite.tests) {
-                if (test.result == Test::Result::Pass)
-                    continue;
-
-                print_modifiers({ FG_GRAY, FG_BOLD });
-                out("         Test:   ");
-                if (test.result == Test::Result::Fail) {
-                    print_modifiers({ CLEAR, FG_RED });
-                    outln("{} (failed):", test.name);
-                    outln("                 {}", test.details);
-                } else {
-                    print_modifiers({ CLEAR, FG_ORANGE });
-                    outln("{} (skipped)", test.name);
-                }
-                print_modifiers({ CLEAR });
-            }
-        }
-    }
-}
-
-void TestRunner::print_test_results() const
-{
-    out("\nTest Suites: ");
-    if (m_counts.suites_failed) {
-        print_modifiers({ FG_RED });
-        out("{} failed, ", m_counts.suites_failed);
-        print_modifiers({ CLEAR });
-    }
-    if (m_counts.suites_passed) {
-        print_modifiers({ FG_GREEN });
-        out("{} passed, ", m_counts.suites_passed);
-        print_modifiers({ CLEAR });
-    }
-    outln("{} total", m_counts.suites_failed + m_counts.suites_passed);
-
-    out("Tests:       ");
-    if (m_counts.tests_failed) {
-        print_modifiers({ FG_RED });
-        out("{} failed, ", m_counts.tests_failed);
-        print_modifiers({ CLEAR });
-    }
-    if (m_counts.tests_skipped) {
-        print_modifiers({ FG_ORANGE });
-        out("{} skipped, ", m_counts.tests_skipped);
-        print_modifiers({ CLEAR });
-    }
-    if (m_counts.tests_passed) {
-        print_modifiers({ FG_GREEN });
-        out("{} passed, ", m_counts.tests_passed);
-        print_modifiers({ CLEAR });
-    }
-    outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed);
-
-    outln("Files:       {} total", m_counts.files_total);
-
-    out("Time:        ");
-    if (m_total_elapsed_time_in_ms < 1000.0) {
-        outln("{}ms", static_cast<int>(m_total_elapsed_time_in_ms));
-    } else {
-        outln("{:>.3}s", m_total_elapsed_time_in_ms / 1000.0);
-    }
-    outln();
-}
-
-class Test262ParserTestRunner final : public TestRunner {
-public:
-    using TestRunner::TestRunner;
-
-private:
-    virtual Vector<String> get_test_paths() const override;
-    virtual JSFileResult run_file_test(const String& test_path) override;
-};
-
-Vector<String> Test262ParserTestRunner::get_test_paths() const
-{
-    Vector<String> paths;
-    iterate_directory_recursively(m_test_root, [&](const String& file_path) {
-        auto dirname = LexicalPath(file_path).dirname();
-        if (dirname.ends_with("early") || dirname.ends_with("fail") || dirname.ends_with("pass") || dirname.ends_with("pass-explicit"))
-            paths.append(file_path);
-    });
-    quick_sort(paths);
-    return paths;
-}
-
-JSFileResult Test262ParserTestRunner::run_file_test(const String& test_path)
-{
-    currently_running_test = test_path;
-
-    auto dirname = LexicalPath(test_path).dirname();
-    bool expecting_file_to_parse;
-    if (dirname.ends_with("early") || dirname.ends_with("fail")) {
-        expecting_file_to_parse = false;
-    } else if (dirname.ends_with("pass") || dirname.ends_with("pass-explicit")) {
-        expecting_file_to_parse = true;
-    } else {
-        VERIFY_NOT_REACHED();
-    }
-
-    auto start_time = get_time_in_ms();
-    String details = "";
-    Test::Result test_result;
-    if (test_path.ends_with(".module.js")) {
-        test_result = Test::Result::Skip;
-        m_counts.tests_skipped++;
-        m_counts.suites_passed++;
-    } else {
-        auto parse_result = parse_file(test_path);
-        if (expecting_file_to_parse) {
-            if (!parse_result.is_error()) {
-                test_result = Test::Result::Pass;
-            } else {
-                test_result = Test::Result::Fail;
-                details = parse_result.error().error.to_string();
-            }
-        } else {
-            if (parse_result.is_error()) {
-                test_result = Test::Result::Pass;
-            } else {
-                test_result = Test::Result::Fail;
-                details = "File was expected to produce a parser error but didn't";
-            }
-        }
-    }
-
-    // test262-parser-tests doesn't have "suites" and "tests" in the usual sense, it just has files
-    // and an expectation whether they should parse or not. We add one suite with one test nonetheless:
-    //
-    // - This makes interpreting skipped test easier as their file is shown as "PASS"
-    // - That way we can show additional information such as "file parsed but shouldn't have" or
-    //   parser errors for files that should parse respectively
-
-    Test::Case test { expecting_file_to_parse ? "file should parse" : "file should not parse", test_result, details };
-    Test::Suite suite { "Parse file", test_result, { test } };
-    JSFileResult file_result {
-        test_path.substring(m_test_root.length() + 1, test_path.length() - m_test_root.length() - 1),
-        {},
-        get_time_in_ms() - start_time,
-        test_result,
-        { suite }
-    };
-
-    if (test_result == Test::Result::Fail) {
-        m_counts.tests_failed++;
-        m_counts.suites_failed++;
-    } else {
-        m_counts.tests_passed++;
-        m_counts.suites_passed++;
-    }
-    m_counts.files_total++;
-    m_total_elapsed_time_in_ms += file_result.time_taken;
-
-    return file_result;
-}
-
-int main(int argc, char** argv)
-{
-    struct sigaction act;
-    memset(&act, 0, sizeof(act));
-    act.sa_flags = SA_NOCLDWAIT;
-    act.sa_handler = handle_sigabrt;
-    int rc = sigaction(SIGABRT, &act, nullptr);
-    if (rc < 0) {
-        perror("sigaction");
-        return 1;
-    }
-
-#ifdef SIGINFO
-    signal(SIGINFO, [](int) {
-        static char buffer[4096];
-        auto& counts = TestRunner::the()->counts();
-        int len = snprintf(buffer, sizeof(buffer), "Pass: %d, Fail: %d, Skip: %d\nCurrent test: %s\n", counts.tests_passed, counts.tests_failed, counts.tests_skipped, currently_running_test.characters());
-        write(STDOUT_FILENO, buffer, len);
-    });
-#endif
-
-    bool print_times = false;
-    bool print_progress =
-#ifdef __serenity__
-        true; // Use OSC 9 to print progress
-#else
-        false;
-#endif
-    bool test262_parser_tests = false;
-    const char* specified_test_root = nullptr;
-
-    Core::ArgsParser args_parser;
-    args_parser.add_option(print_times, "Show duration of each test", "show-time", 't');
-    args_parser.add_option(Core::ArgsParser::Option {
-        .requires_argument = true,
-        .help_string = "Show progress with OSC 9 (true, false)",
-        .long_name = "show-progress",
-        .short_name = 'p',
-        .accept_value = [&](auto* str) {
-            if (StringView { "true" } == str)
-                print_progress = true;
-            else if (StringView { "false" } == str)
-                print_progress = false;
-            else
-                return false;
-            return true;
-        },
-    });
-    args_parser.add_option(collect_on_every_allocation, "Collect garbage after every allocation", "collect-often", 'g');
-    args_parser.add_option(test262_parser_tests, "Run test262 parser tests", "test262-parser-tests", 0);
-    args_parser.add_positional_argument(specified_test_root, "Tests root directory", "path", Core::ArgsParser::Required::No);
-    args_parser.parse(argc, argv);
-
-    if (test262_parser_tests) {
-        if (collect_on_every_allocation) {
-            warnln("--collect-often and --test262-parser-tests options must not be used together");
-            return 1;
-        }
-        if (!specified_test_root) {
-            warnln("Test root is required with --test262-parser-tests");
-            return 1;
-        }
-    }
-
-    if (getenv("DISABLE_DBG_OUTPUT")) {
-        AK::set_debug_enabled(false);
-    }
-
-    String test_root;
-
-    if (specified_test_root) {
-        test_root = String { specified_test_root };
-    } else {
-#ifdef __serenity__
-        test_root = "/home/anon/js-tests";
-#else
-        char* serenity_source_dir = getenv("SERENITY_SOURCE_DIR");
-        if (!serenity_source_dir) {
-            warnln("No test root given, test-js requires the SERENITY_SOURCE_DIR environment variable to be set");
-            return 1;
-        }
-        test_root = String::formatted("{}/Userland/Libraries/LibJS/Tests", serenity_source_dir);
-#endif
-    }
-    if (!Core::File::is_directory(test_root)) {
-        warnln("Test root is not a directory: {}", test_root);
-        return 1;
-    }
-
-    vm = JS::VM::create();
-
-    Test::Counts result_counts;
-    if (test262_parser_tests)
-        result_counts = Test262ParserTestRunner(test_root, print_times, print_progress).run();
-    else
-        result_counts = TestRunner(test_root, print_times, print_progress).run();
-
-    vm = nullptr;
-
-    return result_counts.tests_failed > 0 ? 1 : 0;
-}
diff --git a/Userland/Libraries/LibTest/JavaScriptTestRunner.h b/Userland/Libraries/LibTest/JavaScriptTestRunner.h
new file mode 100644
index 0000000000..1aa788badf
--- /dev/null
+++ b/Userland/Libraries/LibTest/JavaScriptTestRunner.h
@@ -0,0 +1,591 @@
+/*
+ * Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org>
+ * Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
+ * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+
+#pragma once
+
+#include <AK/ByteBuffer.h>
+#include <AK/JsonObject.h>
+#include <AK/JsonValue.h>
+#include <AK/LexicalPath.h>
+#include <AK/QuickSort.h>
+#include <AK/Result.h>
+#include <AK/Tuple.h>
+#include <LibCore/ArgsParser.h>
+#include <LibCore/DirIterator.h>
+#include <LibCore/File.h>
+#include <LibJS/Interpreter.h>
+#include <LibJS/Lexer.h>
+#include <LibJS/Parser.h>
+#include <LibJS/Runtime/Array.h>
+#include <LibJS/Runtime/GlobalObject.h>
+#include <LibJS/Runtime/JSONObject.h>
+#include <LibJS/Runtime/TypedArray.h>
+#include <LibTest/Results.h>
+#include <sys/time.h>
+#include <unistd.h>
+
+#define STRCAT(x, y) __STRCAT(x, y)
+#define STRSTRCAT(x, y) __STRSTRCAT(x, y)
+#define __STRCAT(x, y) x #y
+#define __STRSTRCAT(x, y) x y
+
+// Note: This is a little weird, so here's an explanation:
+//       If the vararg isn't given, the tuple initializer will simply expand to `fn, ::Test::JS::__testjs_last<1>()`
+//       and if it _is_ given (say as `A`), the tuple initializer will expand to `fn, ::Test::JS::__testjs_last<1, A>()`, which will end up being evaluated as `A`
+//       and if multiple args are given, the static_assert will be sad.
+#define __TESTJS_REGISTER_GLOBAL_FUNCTION(name, fn, ...)                                                                                        \
+    struct __TestJS_register_##fn {                                                                                                             \
+        static_assert(                                                                                                                          \
+            ::Test::JS::__testjs_count(__VA_ARGS__) <= 1,                                                                                       \
+            STRCAT(STRSTRCAT(STRCAT("Expected at most three arguments to TESTJS_GLOBAL_FUNCTION at line", __LINE__), ", in file "), __FILE__)); \
+        __TestJS_register_##fn() noexcept                                                                                                       \
+        {                                                                                                                                       \
+            ::Test::JS::s_exposed_global_functions.set(                                                                                         \
+                name,                                                                                                                           \
+                { fn, ::Test::JS::__testjs_last<1, ##__VA_ARGS__>() });                                                                         \
+        }                                                                                                                                       \
+    } __testjs_register_##fn {};
+
+#define TESTJS_GLOBAL_FUNCTION(function, exposed_name, ...)                    \
+    JS_DECLARE_NATIVE_FUNCTION(function);                                      \
+    __TESTJS_REGISTER_GLOBAL_FUNCTION(#exposed_name, function, ##__VA_ARGS__); \
+    JS_DEFINE_NATIVE_FUNCTION(function)
+
+#define TESTJS_MAIN_HOOK()                  \
+    struct __TestJS_main_hook {             \
+        __TestJS_main_hook()                \
+        {                                   \
+            ::Test::JS::g_main_hook = hook; \
+        }                                   \
+        static void hook();                 \
+    } __testjs_common_register_##name {};   \
+    void __TestJS_main_hook::hook()
+
+#define TEST_ROOT(path) \
+    String Test::JS::g_test_root_fragment = path
+
+namespace Test::JS {
+
+namespace JS = ::JS;
+
+template<typename... Args>
+static consteval size_t __testjs_count(Args...) { return sizeof...(Args); }
+
+template<auto... Values>
+static consteval size_t __testjs_last() { return (AK::Detail::IntegralConstant<size_t, Values> {}, ...).value; }
+
+static constexpr auto TOP_LEVEL_TEST_NAME = "__$$TOP_LEVEL$$__";
+extern RefPtr<JS::VM> g_vm;
+extern bool g_collect_on_every_allocation;
+extern String g_currently_running_test;
+extern String g_test_glob;
+struct FunctionWithLength {
+    JS::Value (*function)(JS::VM&, JS::GlobalObject&);
+    size_t length { 0 };
+};
+extern HashMap<String, FunctionWithLength> s_exposed_global_functions;
+extern String g_test_root_fragment;
+extern String g_test_root;
+extern int g_test_argc;
+extern char** g_test_argv;
+extern Function<void()> g_main_hook;
+
+struct ParserError {
+    JS::Parser::Error error;
+    String hint;
+};
+
+struct JSFileResult {
+    String name;
+    Optional<ParserError> error {};
+    double time_taken { 0 };
+    // A failed test takes precedence over a skipped test, which both have
+    // precedence over a passed test
+    Test::Result most_severe_test_result { Test::Result::Pass };
+    Vector<Test::Suite> suites {};
+    Vector<String> logged_messages {};
+};
+
+class TestRunner {
+public:
+    static TestRunner* the()
+    {
+        return s_the;
+    }
+
+    TestRunner(String test_root, String common_path, bool print_times, bool print_progress)
+        : m_common_path(move(common_path))
+        , m_test_root(move(test_root))
+        , m_print_times(print_times)
+        , m_print_progress(print_progress)
+    {
+        VERIFY(!s_the);
+        s_the = this;
+        g_test_root = m_test_root;
+    }
+
+    virtual ~TestRunner() = default;
+
+    void run();
+
+    const Test::Counts& counts() const { return m_counts; }
+
+    bool is_printing_progress() const { return m_print_progress; }
+
+protected:
+    static TestRunner* s_the;
+
+    virtual Vector<String> get_test_paths() const;
+    virtual JSFileResult run_file_test(const String& test_path);
+    void print_file_result(const JSFileResult& file_result) const;
+    void print_test_results() const;
+
+    String m_common_path;
+    String m_test_root;
+    bool m_print_times;
+    bool m_print_progress;
+
+    double m_total_elapsed_time_in_ms { 0 };
+    Test::Counts m_counts;
+
+    RefPtr<JS::Program> m_test_program;
+};
+
+class TestRunnerGlobalObject final : public JS::GlobalObject {
+    JS_OBJECT(TestRunnerGlobalObject, JS::GlobalObject);
+
+public:
+    TestRunnerGlobalObject() = default;
+    virtual ~TestRunnerGlobalObject() override = default;
+
+    virtual void initialize_global_object() override;
+};
+
+inline void TestRunnerGlobalObject::initialize_global_object()
+{
+    Base::initialize_global_object();
+    define_property("global", this, JS::Attribute::Enumerable);
+    for (auto& entry : s_exposed_global_functions) {
+        define_native_function(
+            entry.key, [fn = entry.value.function](auto& vm, auto& global_object) {
+                return fn(vm, global_object);
+            },
+            entry.value.length);
+    }
+}
+
+inline void cleanup_and_exit()
+{
+    // Clear the taskbar progress.
+    if (TestRunner::the() && TestRunner::the()->is_printing_progress())
+        warn("\033]9;-1;\033\\");
+    exit(1);
+}
+
+inline double get_time_in_ms()
+{
+    struct timeval tv1;
+    auto return_code = gettimeofday(&tv1, nullptr);
+    VERIFY(return_code >= 0);
+    return static_cast<double>(tv1.tv_sec) * 1000.0 + static_cast<double>(tv1.tv_usec) / 1000.0;
+}
+
+template<typename Callback>
+inline void iterate_directory_recursively(const String& directory_path, Callback callback)
+{
+    Core::DirIterator directory_iterator(directory_path, Core::DirIterator::Flags::SkipDots);
+
+    while (directory_iterator.has_next()) {
+        auto file_path = directory_iterator.next_full_path();
+        auto is_directory = Core::File::is_directory(file_path);
+        if (is_directory && !file_path.contains("/Fixtures")) {
+            iterate_directory_recursively(file_path, callback);
+        } else if (!is_directory) {
+            callback(move(file_path));
+        }
+    }
+}
+
+inline Vector<String> TestRunner::get_test_paths() const
+{
+    Vector<String> paths;
+    iterate_directory_recursively(m_test_root, [&](const String& file_path) {
+        if (!file_path.ends_with(".js"))
+            return;
+        if (!file_path.ends_with("test-common.js"))
+            paths.append(file_path);
+    });
+    quick_sort(paths);
+    return paths;
+}
+
+inline void TestRunner::run()
+{
+    size_t progress_counter = 0;
+    auto test_paths = get_test_paths();
+    for (auto& path : test_paths) {
+        if (!path.matches(g_test_glob))
+            continue;
+        ++progress_counter;
+        print_file_result(run_file_test(path));
+        if (m_print_progress)
+            warn("\033]9;{};{};\033\\", progress_counter, test_paths.size());
+    }
+
+    if (m_print_progress)
+        warn("\033]9;-1;\033\\");
+
+    print_test_results();
+}
+
+inline AK::Result<NonnullRefPtr<JS::Program>, ParserError> parse_file(const String& file_path)
+{
+    auto file = Core::File::construct(file_path);
+    auto result = file->open(Core::OpenMode::ReadOnly);
+    if (!result) {
+        warnln("Failed to open the following file: \"{}\"", file_path);
+        cleanup_and_exit();
+    }
+
+    auto contents = file->read_all();
+    String test_file_string(reinterpret_cast<const char*>(contents.data()), contents.size());
+    file->close();
+
+    auto parser = JS::Parser(JS::Lexer(test_file_string));
+    auto program = parser.parse_program();
+
+    if (parser.has_errors()) {
+        auto error = parser.errors()[0];
+        return AK::Result<NonnullRefPtr<JS::Program>, ParserError>(ParserError { error, error.source_location_hint(test_file_string) });
+    }
+
+    return AK::Result<NonnullRefPtr<JS::Program>, ParserError>(program);
+}
+
+inline Optional<JsonValue> get_test_results(JS::Interpreter& interpreter)
+{
+    auto result = g_vm->get_variable("__TestResults__", interpreter.global_object());
+    auto json_string = JS::JSONObject::stringify_impl(interpreter.global_object(), result, JS::js_undefined(), JS::js_undefined());
+
+    auto json = JsonValue::from_string(json_string);
+    if (!json.has_value())
+        return {};
+
+    return json.value();
+}
+
+inline JSFileResult TestRunner::run_file_test(const String& test_path)
+{
+    g_currently_running_test = test_path;
+
+    double start_time = get_time_in_ms();
+    auto interpreter = JS::Interpreter::create<TestRunnerGlobalObject>(*g_vm);
+
+    // FIXME: This is a hack while we're refactoring Interpreter/VM stuff.
+    JS::VM::InterpreterExecutionScope scope(*interpreter);
+
+    interpreter->heap().set_should_collect_on_every_allocation(g_collect_on_every_allocation);
+
+    if (!m_test_program) {
+        auto result = parse_file(m_common_path);
+        if (result.is_error()) {
+            warnln("Unable to parse test-common.js");
+            warnln("{}", result.error().error.to_string());
+            warnln("{}", result.error().hint);
+            cleanup_and_exit();
+        }
+        m_test_program = result.value();
+    }
+
+    interpreter->run(interpreter->global_object(), *m_test_program);
+
+    auto file_program = parse_file(test_path);
+    if (file_program.is_error())
+        return { test_path, file_program.error() };
+    interpreter->run(interpreter->global_object(), *file_program.value());
+
+    if (g_vm->exception())
+        g_vm->clear_exception();
+
+    auto test_json = get_test_results(*interpreter);
+    if (!test_json.has_value()) {
+        warnln("Received malformed JSON from test \"{}\"", test_path);
+        cleanup_and_exit();
+    }
+
+    JSFileResult file_result { test_path.substring(m_test_root.length() + 1, test_path.length() - m_test_root.length() - 1) };
+
+    // Collect logged messages
+    auto& arr = interpreter->vm().get_variable("__UserOutput__", interpreter->global_object()).as_array();
+    for (auto& entry : arr.indexed_properties()) {
+        auto message = entry.value_and_attributes(&interpreter->global_object()).value;
+        file_result.logged_messages.append(message.to_string_without_side_effects());
+    }
+
+    test_json.value().as_object().for_each_member([&](const String& suite_name, const JsonValue& suite_value) {
+        Test::Suite suite { suite_name };
+
+        VERIFY(suite_value.is_object());
+
+        suite_value.as_object().for_each_member([&](const String& test_name, const JsonValue& test_value) {
+            Test::Case test { test_name, Test::Result::Fail, "" };
+
+            VERIFY(test_value.is_object());
+            VERIFY(test_value.as_object().has("result"));
+
+            auto result = test_value.as_object().get("result");
+            VERIFY(result.is_string());
+            auto result_string = result.as_string();
+            if (result_string == "pass") {
+                test.result = Test::Result::Pass;
+                m_counts.tests_passed++;
+            } else if (result_string == "fail") {
+                test.result = Test::Result::Fail;
+                m_counts.tests_failed++;
+                suite.most_severe_test_result = Test::Result::Fail;
+                VERIFY(test_value.as_object().has("details"));
+                auto details = test_value.as_object().get("details");
+                VERIFY(result.is_string());
+                test.details = details.as_string();
+            } else {
+                test.result = Test::Result::Skip;
+                if (suite.most_severe_test_result == Test::Result::Pass)
+                    suite.most_severe_test_result = Test::Result::Skip;
+                m_counts.tests_skipped++;
+            }
+
+            suite.tests.append(test);
+        });
+
+        if (suite.most_severe_test_result == Test::Result::Fail) {
+            m_counts.suites_failed++;
+            file_result.most_severe_test_result = Test::Result::Fail;
+        } else {
+            if (suite.most_severe_test_result == Test::Result::Skip && file_result.most_severe_test_result == Test::Result::Pass)
+                file_result.most_severe_test_result = Test::Result::Skip;
+            m_counts.suites_passed++;
+        }
+
+        file_result.suites.append(suite);
+    });
+
+    m_counts.files_total++;
+
+    file_result.time_taken = get_time_in_ms() - start_time;
+    m_total_elapsed_time_in_ms += file_result.time_taken;
+
+    return file_result;
+}
+
+enum Modifier {
+    BG_RED,
+    BG_GREEN,
+    FG_RED,
+    FG_GREEN,
+    FG_ORANGE,
+    FG_GRAY,
+    FG_BLACK,
+    FG_BOLD,
+    ITALIC,
+    CLEAR,
+};
+
+inline void print_modifiers(Vector<Modifier> modifiers)
+{
+    for (auto& modifier : modifiers) {
+        auto code = [&] {
+            switch (modifier) {
+            case BG_RED:
+                return "\033[48;2;255;0;102m";
+            case BG_GREEN:
+                return "\033[48;2;102;255;0m";
+            case FG_RED:
+                return "\033[38;2;255;0;102m";
+            case FG_GREEN:
+                return "\033[38;2;102;255;0m";
+            case FG_ORANGE:
+                return "\033[38;2;255;102;0m";
+            case FG_GRAY:
+                return "\033[38;2;135;139;148m";
+            case FG_BLACK:
+                return "\033[30m";
+            case FG_BOLD:
+                return "\033[1m";
+            case ITALIC:
+                return "\033[3m";
+            case CLEAR:
+                return "\033[0m";
+            }
+            VERIFY_NOT_REACHED();
+        }();
+        out("{}", code);
+    }
+}
+
+inline void TestRunner::print_file_result(const JSFileResult& file_result) const
+{
+    if (file_result.most_severe_test_result == Test::Result::Fail || file_result.error.has_value()) {
+        print_modifiers({ BG_RED, FG_BLACK, FG_BOLD });
+        out(" FAIL ");
+        print_modifiers({ CLEAR });
+    } else {
+        if (m_print_times || file_result.most_severe_test_result != Test::Result::Pass) {
+            print_modifiers({ BG_GREEN, FG_BLACK, FG_BOLD });
+            out(" PASS ");
+            print_modifiers({ CLEAR });
+        } else {
+            return;
+        }
+    }
+
+    out(" {}", file_result.name);
+
+    if (m_print_times) {
+        print_modifiers({ CLEAR, ITALIC, FG_GRAY });
+        if (file_result.time_taken < 1000) {
+            outln(" ({}ms)", static_cast<int>(file_result.time_taken));
+        } else {
+            outln(" ({:3}s)", file_result.time_taken / 1000.0);
+        }
+        print_modifiers({ CLEAR });
+    } else {
+        outln();
+    }
+
+    if (!file_result.logged_messages.is_empty()) {
+        print_modifiers({ FG_GRAY, FG_BOLD });
+#ifdef __serenity__
+        outln("     ⚠ Console output:");
+#else
+        // This emoji has a second invisible byte after it. The one above does not
+        outln("    ⚠️  Console output:");
+#endif
+        print_modifiers({ CLEAR, FG_GRAY });
+        for (auto& message : file_result.logged_messages)
+            outln("         {}", message);
+    }
+
+    if (file_result.error.has_value()) {
+        auto test_error = file_result.error.value();
+
+        print_modifiers({ FG_RED });
+#ifdef __serenity__
+        outln("     ❌ The file failed to parse");
+#else
+        // No invisible byte here, but the spacing still needs to be altered on the host
+        outln("    ❌ The file failed to parse");
+#endif
+        outln();
+        print_modifiers({ FG_GRAY });
+        for (auto& message : test_error.hint.split('\n', true)) {
+            outln("         {}", message);
+        }
+        print_modifiers({ FG_RED });
+        outln("         {}", test_error.error.to_string());
+        outln();
+        return;
+    }
+
+    if (file_result.most_severe_test_result != Test::Result::Pass) {
+        for (auto& suite : file_result.suites) {
+            if (suite.most_severe_test_result == Test::Result::Pass)
+                continue;
+
+            bool failed = suite.most_severe_test_result == Test::Result::Fail;
+
+            print_modifiers({ FG_GRAY, FG_BOLD });
+
+            if (failed) {
+#ifdef __serenity__
+                out("     ❌ Suite:    ");
+#else
+                // No invisible byte here, but the spacing still needs to be altered on the host
+                out("    ❌ Suite:    ");
+#endif
+            } else {
+#ifdef __serenity__
+                out("     ⚠ Suite:    ");
+#else
+                // This emoji has a second invisible byte after it. The one above does not
+                out("    ⚠️  Suite:    ");
+#endif
+            }
+
+            print_modifiers({ CLEAR, FG_GRAY });
+
+            if (suite.name == TOP_LEVEL_TEST_NAME) {
+                outln("<top-level>");
+            } else {
+                outln("{}", suite.name);
+            }
+            print_modifiers({ CLEAR });
+
+            for (auto& test : suite.tests) {
+                if (test.result == Test::Result::Pass)
+                    continue;
+
+                print_modifiers({ FG_GRAY, FG_BOLD });
+                out("         Test:   ");
+                if (test.result == Test::Result::Fail) {
+                    print_modifiers({ CLEAR, FG_RED });
+                    outln("{} (failed):", test.name);
+                    outln("                 {}", test.details);
+                } else {
+                    print_modifiers({ CLEAR, FG_ORANGE });
+                    outln("{} (skipped)", test.name);
+                }
+                print_modifiers({ CLEAR });
+            }
+        }
+    }
+}
+
+inline void TestRunner::print_test_results() const
+{
+    out("\nTest Suites: ");
+    if (m_counts.suites_failed) {
+        print_modifiers({ FG_RED });
+        out("{} failed, ", m_counts.suites_failed);
+        print_modifiers({ CLEAR });
+    }
+    if (m_counts.suites_passed) {
+        print_modifiers({ FG_GREEN });
+        out("{} passed, ", m_counts.suites_passed);
+        print_modifiers({ CLEAR });
+    }
+    outln("{} total", m_counts.suites_failed + m_counts.suites_passed);
+
+    out("Tests:       ");
+    if (m_counts.tests_failed) {
+        print_modifiers({ FG_RED });
+        out("{} failed, ", m_counts.tests_failed);
+        print_modifiers({ CLEAR });
+    }
+    if (m_counts.tests_skipped) {
+        print_modifiers({ FG_ORANGE });
+        out("{} skipped, ", m_counts.tests_skipped);
+        print_modifiers({ CLEAR });
+    }
+    if (m_counts.tests_passed) {
+        print_modifiers({ FG_GREEN });
+        out("{} passed, ", m_counts.tests_passed);
+        print_modifiers({ CLEAR });
+    }
+    outln("{} total", m_counts.tests_failed + m_counts.tests_skipped + m_counts.tests_passed);
+
+    outln("Files:       {} total", m_counts.files_total);
+
+    out("Time:        ");
+    if (m_total_elapsed_time_in_ms < 1000.0) {
+        outln("{}ms", static_cast<int>(m_total_elapsed_time_in_ms));
+    } else {
+        outln("{:>.3}s", m_total_elapsed_time_in_ms / 1000.0);
+    }
+    outln();
+}
+}
diff --git a/Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp b/Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp
new file mode 100644
index 0000000000..5e9889d4ca
--- /dev/null
+++ b/Userland/Libraries/LibTest/JavaScriptTestRunnerMain.cpp
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2020, Matthew Olsson <mattco@serenityos.org>
+ * Copyright (c) 2020-2021, Linus Groh <linusg@serenityos.org>
+ * Copyright (c) 2021, Ali Mohammad Pur <mpfard@serenityos.org>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+
+#include <LibTest/JavaScriptTestRunner.h>
+#include <signal.h>
+#include <stdio.h>
+
+namespace Test::JS {
+
+RefPtr<::JS::VM> g_vm;
+bool g_collect_on_every_allocation = false;
+String g_currently_running_test;
+String g_test_glob;
+HashMap<String, FunctionWithLength> s_exposed_global_functions;
+Function<void()> g_main_hook;
+TestRunner* TestRunner::s_the = nullptr;
+String g_test_root;
+int g_test_argc;
+char** g_test_argv;
+
+}
+
+using namespace Test::JS;
+
+static StringView g_program_name { "test-js"sv };
+
+static void handle_sigabrt(int)
+{
+    dbgln("{}: SIGABRT received, cleaning up.", g_program_name);
+    cleanup_and_exit();
+}
+
+int main(int argc, char** argv)
+{
+    g_test_argc = argc;
+    g_test_argv = argv;
+    auto program_name = LexicalPath { argv[0] }.basename();
+    g_program_name = program_name;
+
+    struct sigaction act;
+    memset(&act, 0, sizeof(act));
+    act.sa_flags = SA_NOCLDWAIT;
+    act.sa_handler = handle_sigabrt;
+    int rc = sigaction(SIGABRT, &act, nullptr);
+    if (rc < 0) {
+        perror("sigaction");
+        return 1;
+    }
+
+#ifdef SIGINFO
+    signal(SIGINFO, [](int) {
+        static char buffer[4096];
+        auto& counts = TestRunner::the()->counts();
+        int len = snprintf(buffer, sizeof(buffer), "Pass: %d, Fail: %d, Skip: %d\nCurrent test: %s\n", counts.tests_passed, counts.tests_failed, counts.tests_skipped, g_currently_running_test.characters());
+        write(STDOUT_FILENO, buffer, len);
+    });
+#endif
+
+    bool print_times = false;
+    bool print_progress =
+#ifdef __serenity__
+        true; // Use OSC 9 to print progress
+#else
+        false;
+#endif
+    const char* specified_test_root = nullptr;
+    String common_path;
+
+    Core::ArgsParser args_parser;
+    args_parser.add_option(print_times, "Show duration of each test", "show-time", 't');
+    args_parser.add_option(Core::ArgsParser::Option {
+        .requires_argument = true,
+        .help_string = "Show progress with OSC 9 (true, false)",
+        .long_name = "show-progress",
+        .short_name = 'p',
+        .accept_value = [&](auto* str) {
+            if (StringView { "true" } == str)
+                print_progress = true;
+            else if (StringView { "false" } == str)
+                print_progress = false;
+            else
+                return false;
+            return true;
+        },
+    });
+    args_parser.add_option(g_collect_on_every_allocation, "Collect garbage after every allocation", "collect-often", 'g');
+    args_parser.add_option(g_test_glob, "Only run tests matching the given glob", "filter", 'f', "glob");
+    args_parser.add_positional_argument(specified_test_root, "Tests root directory", "path", Core::ArgsParser::Required::No);
+    args_parser.add_positional_argument(common_path, "Path to tests-common.js", "common-path", Core::ArgsParser::Required::No);
+    args_parser.parse(argc, argv);
+
+    g_test_glob = String::formatted("*{}*", g_test_glob);
+
+    if (getenv("DISABLE_DBG_OUTPUT")) {
+        AK::set_debug_enabled(false);
+    }
+
+    String test_root;
+
+    if (specified_test_root) {
+        test_root = String { specified_test_root };
+    } else {
+#ifdef __serenity__
+        test_root = LexicalPath::join("/home/anon", String::formatted("{}-tests", program_name.split_view('-').last())).string();
+#else
+        char* serenity_source_dir = getenv("SERENITY_SOURCE_DIR");
+        if (!serenity_source_dir) {
+            warnln("No test root given, {} requires the SERENITY_SOURCE_DIR environment variable to be set", g_program_name);
+            return 1;
+        }
+        test_root = String::formatted("{}/{}", serenity_source_dir, g_test_root_fragment);
+        common_path = String::formatted("{}/Userland/Libraries/LibJS/Tests/test-common.js", serenity_source_dir);
+#endif
+    }
+    if (!Core::File::is_directory(test_root)) {
+        warnln("Test root is not a directory: {}", test_root);
+        return 1;
+    }
+
+    if (common_path.is_empty()) {
+#ifdef __serenity__
+        common_path = "/home/anon/js-tests/test-common.js";
+#else
+        char* serenity_source_dir = getenv("SERENITY_SOURCE_DIR");
+        if (!serenity_source_dir) {
+            warnln("No test root given, {} requires the SERENITY_SOURCE_DIR environment variable to be set", g_program_name);
+            return 1;
+        }
+        common_path = String::formatted("{}/Userland/Libraries/LibJS/Tests/test-common.js", serenity_source_dir);
+#endif
+    }
+
+    if (chdir(test_root.characters()) < 0) {
+        auto saved_errno = errno;
+        warnln("chdir failed: {}", strerror(saved_errno));
+        return 1;
+    }
+
+    if (g_main_hook)
+        g_main_hook();
+
+    if (!g_vm)
+        g_vm = JS::VM::create();
+
+    TestRunner test_runner(test_root, common_path, print_times, print_progress);
+    test_runner.run();
+
+    g_vm = nullptr;
+
+    return test_runner.counts().tests_failed > 0 ? 1 : 0;
+}
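
For completeness, the header also ships a `TESTJS_MAIN_HOOK()` macro that `test-js` itself does not use. Since `main()` invokes `g_main_hook` after `chdir()`-ing into the test root but before creating the VM, a suite can use it for one-time setup; a minimal sketch, where the hook body is an illustrative assumption rather than anything this commit prescribes:

```cpp
// Hypothetical use of TESTJS_MAIN_HOOK() from a test runner .cpp file.
#include <LibTest/JavaScriptTestRunner.h>

TEST_ROOT("Userland/Libraries/LibJS/Tests");

TESTJS_MAIN_HOOK()
{
    // Runs once from main(), after chdir(test_root) and before the runner
    // starts. g_vm is still null at this point, so a suite may pre-create
    // it here; main() only creates the VM if the hook didn't.
    ::Test::JS::g_vm = JS::VM::create();
}
```

Global-function registration works the same way: each `TESTJS_GLOBAL_FUNCTION(name, exposedName, length)` expands to a static registrar object whose constructor records the function in `s_exposed_global_functions`, and `TestRunnerGlobalObject::initialize_global_object()` then exposes every registered entry to the scripts.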