test: support flexibly setting custom task options
Add a single TEST_ENTRY_CUSTOM hook that can be used to override the task entry defaults. Different tests can now have different timeouts, depending on what is appropriate for each test, so a separate TEST_OUTPUT_ENTRY hook is no longer necessary. To support per-task timeouts, the timeout field has been moved into the task_entry_t struct; the default timeout (5000 ms) is now set as part of TEST_ENTRY.
This commit is contained in:
parent
513ce625ba
commit
756087e017
@ -29,9 +29,6 @@
|
||||
#include "benchmark-list.h"
|
||||
|
||||
|
||||
/* The time in milliseconds after which a single benchmark times out. */
|
||||
#define BENCHMARK_TIMEOUT 60000
|
||||
|
||||
static int maybe_run_test(int argc, char **argv);
|
||||
|
||||
|
||||
@ -39,7 +36,7 @@ int main(int argc, char **argv) {
|
||||
platform_init(argc, argv);
|
||||
|
||||
switch (argc) {
|
||||
case 1: return run_tests(BENCHMARK_TIMEOUT, 1);
|
||||
case 1: return run_tests(1);
|
||||
case 2: return maybe_run_test(argc, argv);
|
||||
case 3: return run_test_part(argv[1], argv[2]);
|
||||
default:
|
||||
@ -60,5 +57,5 @@ static int maybe_run_test(int argc, char **argv) {
|
||||
return 42;
|
||||
}
|
||||
|
||||
return run_test(argv[1], BENCHMARK_TIMEOUT, 1, 1);
|
||||
return run_test(argv[1], 1, 1);
|
||||
}
|
||||
|
||||
@ -36,9 +36,6 @@
|
||||
/* Actual tests and helpers are defined in test-list.h */
|
||||
#include "test-list.h"
|
||||
|
||||
/* The time in milliseconds after which a single test times out. */
|
||||
#define TEST_TIMEOUT 5000
|
||||
|
||||
int ipc_helper(int listen_after_write);
|
||||
int ipc_helper_tcp_connection(void);
|
||||
int ipc_send_recv_helper(void);
|
||||
@ -53,7 +50,7 @@ int main(int argc, char **argv) {
|
||||
argv = uv_setup_args(argc, argv);
|
||||
|
||||
switch (argc) {
|
||||
case 1: return run_tests(TEST_TIMEOUT, 0);
|
||||
case 1: return run_tests(0);
|
||||
case 2: return maybe_run_test(argc, argv);
|
||||
case 3: return run_test_part(argv[1], argv[2]);
|
||||
default:
|
||||
@ -155,5 +152,5 @@ static int maybe_run_test(int argc, char **argv) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return run_test(argv[1], TEST_TIMEOUT, 0, 1);
|
||||
return run_test(argv[1], 0, 1);
|
||||
}
|
||||
|
||||
@ -90,7 +90,7 @@ const char* fmt(double d) {
|
||||
}
|
||||
|
||||
|
||||
int run_tests(int timeout, int benchmark_output) {
|
||||
int run_tests(int benchmark_output) {
|
||||
int total;
|
||||
int passed;
|
||||
int failed;
|
||||
@ -130,7 +130,7 @@ int run_tests(int timeout, int benchmark_output) {
|
||||
log_progress(total, passed, failed, todos, skipped, task->task_name);
|
||||
}
|
||||
|
||||
test_result = run_test(task->task_name, timeout, benchmark_output, current);
|
||||
test_result = run_test(task->task_name, benchmark_output, current);
|
||||
switch (test_result) {
|
||||
case TEST_OK: passed++; break;
|
||||
case TEST_TODO: todos++; break;
|
||||
@ -189,7 +189,6 @@ void log_tap_result(int test_count,
|
||||
|
||||
|
||||
int run_test(const char* test,
|
||||
int timeout,
|
||||
int benchmark_output,
|
||||
int test_count) {
|
||||
char errmsg[1024] = "no error";
|
||||
@ -279,7 +278,7 @@ int run_test(const char* test,
|
||||
goto out;
|
||||
}
|
||||
|
||||
result = process_wait(main_proc, 1, timeout);
|
||||
result = process_wait(main_proc, 1, task->timeout);
|
||||
if (result == -1) {
|
||||
FATAL("process_wait failed");
|
||||
} else if (result == -2) {
|
||||
|
||||
@ -41,6 +41,11 @@ typedef struct {
|
||||
int (*main)(void);
|
||||
int is_helper;
|
||||
int show_output;
|
||||
|
||||
/*
|
||||
* The time in milliseconds after which a single test or benchmark times out.
|
||||
*/
|
||||
int timeout;
|
||||
} task_entry_t, bench_entry_t;
|
||||
|
||||
|
||||
@ -51,29 +56,29 @@ typedef struct {
|
||||
task_entry_t TASKS[] = {
|
||||
|
||||
#define TASK_LIST_END \
|
||||
{ 0, 0, 0, 0, 0 } \
|
||||
{ 0, 0, 0, 0, 0, 0 } \
|
||||
};
|
||||
|
||||
#define TEST_DECLARE(name) \
|
||||
int run_test_##name(void);
|
||||
|
||||
#define TEST_ENTRY(name) \
|
||||
{ #name, #name, &run_test_##name, 0, 0 },
|
||||
{ #name, #name, &run_test_##name, 0, 0, 5000 },
|
||||
|
||||
#define TEST_OUTPUT_ENTRY(name) \
|
||||
{ #name, #name, &run_test_##name, 0, 1 },
|
||||
#define TEST_ENTRY_CUSTOM(name, is_helper, show_output, timeout) \
|
||||
{ #name, #name, &run_test_##name, is_helper, show_output, timeout },
|
||||
|
||||
#define BENCHMARK_DECLARE(name) \
|
||||
int run_benchmark_##name(void);
|
||||
|
||||
#define BENCHMARK_ENTRY(name) \
|
||||
{ #name, #name, &run_benchmark_##name, 0, 0 },
|
||||
{ #name, #name, &run_benchmark_##name, 0, 0, 60000 },
|
||||
|
||||
#define HELPER_DECLARE(name) \
|
||||
int run_helper_##name(void);
|
||||
|
||||
#define HELPER_ENTRY(task_name, name) \
|
||||
{ #task_name, #name, &run_helper_##name, 1, 0 },
|
||||
{ #task_name, #name, &run_helper_##name, 1, 0, 0 },
|
||||
|
||||
#define TEST_HELPER HELPER_ENTRY
|
||||
#define BENCHMARK_HELPER HELPER_ENTRY
|
||||
@ -97,13 +102,12 @@ extern task_entry_t TASKS[];
|
||||
/*
|
||||
* Run all tests.
|
||||
*/
|
||||
int run_tests(int timeout, int benchmark_output);
|
||||
int run_tests(int benchmark_output);
|
||||
|
||||
/*
|
||||
* Run a single test. Starts up any helpers.
|
||||
*/
|
||||
int run_test(const char* test,
|
||||
int timeout,
|
||||
int benchmark_output,
|
||||
int test_count);
|
||||
|
||||
|
||||
@ -248,7 +248,7 @@ HELPER_DECLARE (pipe_echo_server)
|
||||
|
||||
|
||||
TASK_LIST_START
|
||||
TEST_OUTPUT_ENTRY (platform_output)
|
||||
TEST_ENTRY_CUSTOM (platform_output, 0, 1, 5000)
|
||||
|
||||
#if 0
|
||||
TEST_ENTRY (callback_order)
|
||||
|
||||
Loading…
Reference in New Issue
Block a user