Commit 264b87ff authored by Alexander Timin's avatar Alexander Timin Committed by Commit Bot

[base/test] Run tests in parallel for --gtest-repeat.

Ensure that the test launcher can run a test multiple times in parallel
to help us find flaky tests faster.

Also add documentation for --gtest-repeat and --gtest-break-on-failure.

R=erikchen@chromium.org
BUG=976795

Change-Id: I767a618fda6ad8f7424b43b0bf1a06b5238f127a
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1667313
Commit-Queue: Alexander Timin <altimin@chromium.org>
Reviewed-by: default avatarIlia Samsonov <isamsonov@google.com>
Cr-Commit-Position: refs/heads/master@{#804465}
parent fdcf670f
...@@ -928,11 +928,18 @@ bool TestLauncher::Run(CommandLine* command_line) { ...@@ -928,11 +928,18 @@ bool TestLauncher::Run(CommandLine* command_line) {
// Indicate a test did not succeed. // Indicate a test did not succeed.
bool test_failed = false; bool test_failed = false;
int cycles = cycles_; int iterations = cycles_;
if (cycles_ > 1 && !stop_on_failure_) {
// If we don't stop on failure, execute all the repeats in all iteration,
// which allows us to parallelize the execution.
iterations = 1;
repeats_per_iteration_ = cycles_;
}
// Set to false if any iteration fails. // Set to false if any iteration fails.
bool run_result = true; bool run_result = true;
while ((cycles > 0 || cycles == -1) && !(stop_on_failure_ && test_failed)) { while ((iterations > 0 || iterations == -1) &&
!(stop_on_failure_ && test_failed)) {
OnTestIterationStart(); OnTestIterationStart();
RunTests(); RunTests();
...@@ -949,7 +956,7 @@ bool TestLauncher::Run(CommandLine* command_line) { ...@@ -949,7 +956,7 @@ bool TestLauncher::Run(CommandLine* command_line) {
test_failed = test_success_count_ != test_finished_count_; test_failed = test_success_count_ != test_finished_count_;
OnTestIterationFinished(); OnTestIterationFinished();
// Special value "-1" means "repeat indefinitely". // Special value "-1" means "repeat indefinitely".
cycles = (cycles == -1) ? cycles : cycles - 1; iterations = (iterations == -1) ? iterations : iterations - 1;
} }
if (cycles_ != 1) if (cycles_ != 1)
...@@ -1347,6 +1354,11 @@ bool TestLauncher::Init(CommandLine* command_line) { ...@@ -1347,6 +1354,11 @@ bool TestLauncher::Init(CommandLine* command_line) {
} }
retry_limit_ = retry_limit; retry_limit_ = retry_limit;
} else if (command_line->HasSwitch(kGTestRepeatFlag) ||
command_line->HasSwitch(kGTestBreakOnFailure)) {
// If we are repeating tests or waiting for the first test to fail, disable
// retries.
retry_limit_ = 0U;
} else if (!BotModeEnabled(command_line) && } else if (!BotModeEnabled(command_line) &&
(command_line->HasSwitch(kGTestFilterFlag) || (command_line->HasSwitch(kGTestFilterFlag) ||
command_line->HasSwitch(kIsolatedScriptTestFilterFlag))) { command_line->HasSwitch(kIsolatedScriptTestFilterFlag))) {
...@@ -1678,15 +1690,11 @@ void TestLauncher::CombinePositiveTestFilters( ...@@ -1678,15 +1690,11 @@ void TestLauncher::CombinePositiveTestFilters(
} }
} }
void TestLauncher::RunTests() { std::vector<std::string> TestLauncher::CollectTests() {
std::vector<std::string> test_names; std::vector<std::string> test_names;
size_t test_found_count = 0;
for (const TestInfo& test_info : tests_) { for (const TestInfo& test_info : tests_) {
std::string test_name = test_info.GetFullName(); std::string test_name = test_info.GetFullName();
// Count tests in the binary, before we apply filter and sharding.
test_found_count++;
std::string prefix_stripped_name = test_info.GetPrefixStrippedName(); std::string prefix_stripped_name = test_info.GetPrefixStrippedName();
// Skip the test that doesn't match the filter (if given). // Skip the test that doesn't match the filter (if given).
...@@ -1738,11 +1746,19 @@ void TestLauncher::RunTests() { ...@@ -1738,11 +1746,19 @@ void TestLauncher::RunTests() {
test_names.push_back(test_name); test_names.push_back(test_name);
} }
// Save an early test summary in case the launcher crashes or gets killed. return test_names;
results_tracker_.GeneratePlaceholderIteration(); }
MaybeSaveSummaryAsJSON({"EARLY_SUMMARY"});
void TestLauncher::RunTests() {
std::vector<std::string> original_test_names = CollectTests();
broken_threshold_ = std::max(static_cast<size_t>(20), test_found_count / 10); std::vector<std::string> test_names;
for (int i = 0; i < repeats_per_iteration_; ++i) {
test_names.insert(test_names.end(), original_test_names.begin(),
original_test_names.end());
}
broken_threshold_ = std::max(static_cast<size_t>(20), tests_.size() / 10);
test_started_count_ = test_names.size(); test_started_count_ = test_names.size();
...@@ -1754,8 +1770,17 @@ void TestLauncher::RunTests() { ...@@ -1754,8 +1770,17 @@ void TestLauncher::RunTests() {
fflush(stdout); fflush(stdout);
} }
TestRunner test_runner(this, parallel_jobs_, // Save an early test summary in case the launcher crashes or gets killed.
launcher_delegate_->GetBatchSize()); results_tracker_.GeneratePlaceholderIteration();
MaybeSaveSummaryAsJSON({"EARLY_SUMMARY"});
// If we are repeating the test, set batch size to 1 to ensure that batch size
// does not interfere with repeats (unittests are using filter for batches and
// can't run the same test twice in the same batch).
size_t batch_size =
repeats_per_iteration_ > 1 ? 1U : launcher_delegate_->GetBatchSize();
TestRunner test_runner(this, parallel_jobs_, batch_size);
test_runner.Run(test_names); test_runner.Run(test_names);
} }
......
...@@ -234,6 +234,8 @@ class TestLauncher { ...@@ -234,6 +234,8 @@ class TestLauncher {
bool was_timeout, bool was_timeout,
int leaked_items); int leaked_items);
std::vector<std::string> CollectTests();
// Make sure we don't accidentally call the wrong methods e.g. on the worker // Make sure we don't accidentally call the wrong methods e.g. on the worker
// pool thread. Should be the first member so that it's destroyed last: when // pool thread. Should be the first member so that it's destroyed last: when
// destroying other members, especially the worker pool, we may check the code // destroying other members, especially the worker pool, we may check the code
...@@ -311,6 +313,11 @@ class TestLauncher { ...@@ -311,6 +313,11 @@ class TestLauncher {
// redirect stdio of subprocess // redirect stdio of subprocess
bool redirect_stdio_; bool redirect_stdio_;
// Number of times all tests should be repeated during each iteration.
// 1 if gtest_repeat is not specified or gtest_break_on_failure is specified.
// Otherwise it matches gtest_repeat value.
int repeats_per_iteration_ = 1;
DISALLOW_COPY_AND_ASSIGN(TestLauncher); DISALLOW_COPY_AND_ASSIGN(TestLauncher);
}; };
......
...@@ -265,18 +265,37 @@ TEST_F(TestLauncherTest, FilterIncludePreTest) { ...@@ -265,18 +265,37 @@ TEST_F(TestLauncherTest, FilterIncludePreTest) {
} }
// Test TestLauncher "gtest_repeat" switch. // Test TestLauncher "gtest_repeat" switch.
TEST_F(TestLauncherTest, RunningMultipleIterations) { TEST_F(TestLauncherTest, RepeatTest) {
AddMockedTests("Test", {"firstTest"}); AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
// Unless --gtest-break-on-failure is specified,
command_line->AppendSwitchASCII("gtest_repeat", "2"); command_line->AppendSwitchASCII("gtest_repeat", "2");
using ::testing::_; using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
.Times(2) .Times(2)
.WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest", .WillRepeatedly(::testing::DoAll(OnTestResult(
TestResult::TEST_SUCCESS)); &test_launcher, "Test.firstTest", TestResult::TEST_SUCCESS)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
// Test TestLauncher --gtest_repeat and --gtest_break_on_failure.
TEST_F(TestLauncherTest, RunningMultipleIterationsUntilFailure) {
AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls();
// Unless --gtest-break-on-failure is specified,
command_line->AppendSwitchASCII("gtest_repeat", "4");
command_line->AppendSwitch("gtest_break_on_failure");
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
.WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS)))
.WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS)))
.WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_FAILURE)));
EXPECT_FALSE(test_launcher.Run(command_line.get()));
}
// Test TestLauncher will retry failed test, and stop on success. // Test TestLauncher will retry failed test, and stop on success.
TEST_F(TestLauncherTest, SuccessOnRetryTests) { TEST_F(TestLauncherTest, SuccessOnRetryTests) {
AddMockedTests("Test", {"firstTest"}); AddMockedTests("Test", {"firstTest"});
...@@ -516,6 +535,8 @@ TEST_F(TestLauncherTest, JsonSummary) { ...@@ -516,6 +535,8 @@ TEST_F(TestLauncherTest, JsonSummary) {
FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json"); FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
command_line->AppendSwitchPath("test-launcher-summary-output", path); command_line->AppendSwitchPath("test-launcher-summary-output", path);
command_line->AppendSwitchASCII("gtest_repeat", "2"); command_line->AppendSwitchASCII("gtest_repeat", "2");
// Force the repeats to run sequentially.
command_line->AppendSwitch("gtest_break_on_failure");
// Setup results to be returned by the test launcher delegate. // Setup results to be returned by the test launcher delegate.
TestResult first_result = TestResult first_result =
......
...@@ -49,68 +49,82 @@ const size_t kDefaultTestBatchLimit = 10; ...@@ -49,68 +49,82 @@ const size_t kDefaultTestBatchLimit = 10;
#if !defined(OS_ANDROID) #if !defined(OS_ANDROID)
void PrintUsage() { void PrintUsage() {
fprintf(stdout, fprintf(
"Runs tests using the gtest framework, each batch of tests being\n" stdout,
"run in their own process. Supported command-line flags:\n" "Runs tests using the gtest framework, each batch of tests being\n"
"\n" "run in their own process. Supported command-line flags:\n"
" Common flags:\n" "\n"
" --gtest_filter=...\n" " Common flags:\n"
" Runs a subset of tests (see --gtest_help for more info).\n" " --gtest_filter=...\n"
"\n" " Runs a subset of tests (see --gtest_help for more info).\n"
" --help\n" "\n"
" Shows this message.\n" " --help\n"
"\n" " Shows this message.\n"
" --gtest_help\n" "\n"
" Shows the gtest help message.\n" " --gtest_help\n"
"\n" " Shows the gtest help message.\n"
" --test-launcher-jobs=N\n" "\n"
" Sets the number of parallel test jobs to N.\n" " --test-launcher-jobs=N\n"
"\n" " Sets the number of parallel test jobs to N.\n"
" --single-process-tests\n" "\n"
" Runs the tests and the launcher in the same process. Useful\n" " --single-process-tests\n"
" for debugging a specific test in a debugger.\n" " Runs the tests and the launcher in the same process. Useful\n"
"\n" " for debugging a specific test in a debugger.\n"
" Other flags:\n" "\n"
" --test-launcher-filter-file=PATH\n" " Other flags:\n"
" Like --gtest_filter, but read the test filter from PATH.\n" " --test-launcher-filter-file=PATH\n"
" Supports multiple filter paths separated by ';'.\n" " Like --gtest_filter, but read the test filter from PATH.\n"
" One pattern per line; lines starting with '-' are exclusions.\n" " Supports multiple filter paths separated by ';'.\n"
" See also //testing/buildbot/filters/README.md file.\n" " One pattern per line; lines starting with '-' are exclusions.\n"
"\n" " See also //testing/buildbot/filters/README.md file.\n"
" --test-launcher-batch-limit=N\n" "\n"
" Sets the limit of test batch to run in a single process to N.\n" " --test-launcher-batch-limit=N\n"
"\n" " Sets the limit of test batch to run in a single process to N.\n"
" --test-launcher-debug-launcher\n" "\n"
" Disables autodetection of debuggers and similar tools,\n" " --test-launcher-debug-launcher\n"
" making it possible to use them to debug launcher itself.\n" " Disables autodetection of debuggers and similar tools,\n"
"\n" " making it possible to use them to debug launcher itself.\n"
" --test-launcher-retry-limit=N\n" "\n"
" Sets the limit of test retries on failures to N.\n" " --test-launcher-retry-limit=N\n"
"\n" " Sets the limit of test retries on failures to N.\n"
" --test-launcher-summary-output=PATH\n" " --gtest-repeat=N\n"
" Saves a JSON machine-readable summary of the run.\n" " Forces the launcher to run every test N times. -1 is a special"
"\n" " value, causing the infinite amount of iterations."
" --test-launcher-print-test-stdio=auto|always|never\n" " Repeated tests are run in parallel, unless the number of"
" Controls when full test output is printed.\n" " iterations is infinite or --gtest-break-on-failure is specified"
" auto means to print it when the test failed.\n" " (see below)."
"\n" " Consider using --test_launcher-jobs flag to speed up the"
" --test-launcher-test-part-results-limit=N\n" " parallel execution."
" Sets the limit of failed EXPECT/ASSERT entries in the xml and\n" "\n"
" JSON outputs per test to N (default N=10). Negative value \n" " --gtest-break-on-failure\n"
" will disable this limit.\n" " Stop running repeated tests as soon as one repeat of the test fails."
"\n" " This flag forces sequential repeats and prevents parallelised"
" --test-launcher-total-shards=N\n" " execution."
" Sets the total number of shards to N.\n" "\n"
"\n" " --test-launcher-summary-output=PATH\n"
" --test-launcher-shard-index=N\n" " Saves a JSON machine-readable summary of the run.\n"
" Sets the shard index to run to N (from 0 to TOTAL - 1).\n" "\n"
"\n" " --test-launcher-print-test-stdio=auto|always|never\n"
" --dont-use-job-objects\n" " Controls when full test output is printed.\n"
" Avoids using job objects in Windows.\n" " auto means to print it when the test failed.\n"
"\n" "\n"
" --test-launcher-print-temp-leaks\n" " --test-launcher-test-part-results-limit=N\n"
" Prints information about leaked files and/or directories in\n" " Sets the limit of failed EXPECT/ASSERT entries in the xml and\n"
" child process's temporary directories (Windows and macOS).\n"); " JSON outputs per test to N (default N=10). Negative value \n"
" will disable this limit.\n"
"\n"
" --test-launcher-total-shards=N\n"
" Sets the total number of shards to N.\n"
"\n"
" --test-launcher-shard-index=N\n"
" Sets the shard index to run to N (from 0 to TOTAL - 1).\n"
"\n"
" --dont-use-job-objects\n"
" Avoids using job objects in Windows.\n"
"\n"
" --test-launcher-print-temp-leaks\n"
" Prints information about leaked files and/or directories in\n"
" child process's temporary directories (Windows and macOS).\n");
fflush(stdout); fflush(stdout);
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment