Commit 63645790 authored by Ilia Samsonov, committed by Commit Bot

Moved pre test logic to test launcher.

First, all tests are reordered in the test launcher so that
"PRE_test" is guaranteed to precede "test" by the time the list
reaches the delegate.

Test shuffling was moved so that it happens before dependent tests
are reordered.

Finally, the content test launcher delegate was simplified. It still
has some logic to group dependent tests together so they run
consecutively during a parallel launch.
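
The reordering guarantee can be illustrated with a small standalone sketch
(not the actual TestLauncher code, which works on TestInfo objects and also
reports orphaned PRE_ tests). It assumes gtest-style "Suite.Test" names;
ReorderPreTests, GetPreName and IsPreTest below are illustrative helpers only:

#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

namespace {

const char kPreTestPrefix[] = "PRE_";

// Name of the PRE_ test that must run directly before |name|,
// e.g. "Suite.Test" -> "Suite.PRE_Test". Assumes a "Suite.Test" format.
std::string GetPreName(const std::string& name) {
  size_t dot = name.find('.');
  return name.substr(0, dot + 1) + kPreTestPrefix + name.substr(dot + 1);
}

bool IsPreTest(const std::string& name) {
  return name.find(kPreTestPrefix) != std::string::npos;
}

// Reorders |tests| so that each PRE_ chain is consecutive and ordered
// PRE_PRE_Foo, PRE_Foo, Foo; unrelated tests keep their relative order.
// Orphaned PRE_ tests (with no final test present) are silently dropped here.
std::vector<std::string> ReorderPreTests(const std::vector<std::string>& tests) {
  std::unordered_set<std::string> pre_tests;
  for (const std::string& name : tests) {
    if (IsPreTest(name))
      pre_tests.insert(name);
  }

  std::vector<std::string> ordered;
  for (const std::string& name : tests) {
    if (IsPreTest(name))
      continue;  // PRE_ tests are emitted when their final test is reached.
    std::vector<std::string> chain = {name};
    while (pre_tests.count(GetPreName(chain.back())))
      chain.push_back(GetPreName(chain.back()));
    ordered.insert(ordered.end(), chain.rbegin(), chain.rend());
  }
  return ordered;
}

}  // namespace

int main() {
  const std::vector<std::string> tests = {
      "Test.firstTest", "Test.PRE_PRE_firstTest", "Test.PRE_firstTest",
      "Test.secondTest"};
  // Prints PRE_PRE_firstTest, PRE_firstTest, firstTest, secondTest in order.
  for (const std::string& name : ReorderPreTests(tests))
    std::cout << name << "\n";
  return 0;
}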

Bug: 936248
Change-Id: I7d34e45eee1c2c0c88ded8e5cc054327fe27d7b3
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1638139
Commit-Queue: Ilia Samsonov <isamsonov@google.com>
Reviewed-by: Scott Violet <sky@chromium.org>
Reviewed-by: Erik Chen <erikchen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#666365}
parent 754217ec
@@ -589,6 +589,9 @@ class TestLauncher::TestInfo {
   // Returns test name with PRE_ prefix added, excluding DISABLE_ prefix.
   std::string GetPreName() const;
 
+  // Returns test name excluding DISABLED_ and PRE_ prefixes.
+  std::string GetPrefixStrippedName() const;
+
   const std::string& test_case_name() const { return test_case_name_; }
   const std::string& test_name() const { return test_name_; }
   const std::string& file() const { return file_; }
@@ -636,6 +639,12 @@ std::string TestLauncher::TestInfo::GetPreName() const {
   return FormatFullTestName(case_name, kPreTestPrefix + name);
 }
 
+std::string TestLauncher::TestInfo::GetPrefixStrippedName() const {
+  std::string test_name = GetDisabledStrippedName();
+  ReplaceSubstringsAfterOffset(&test_name, 0, kPreTestPrefix, std::string());
+  return test_name;
+}
+
 TestLauncherDelegate::~TestLauncherDelegate() = default;
 
 TestLauncher::LaunchOptions::LaunchOptions() = default;
@@ -656,8 +665,6 @@ TestLauncher::TestLauncher(TestLauncherDelegate* launcher_delegate,
       test_broken_count_(0),
       retry_limit_(0),
       force_run_broken_tests_(false),
-      shuffle_(false),
-      shuffle_seed_(0),
       watchdog_timer_(FROM_HERE,
                       kOutputTimeout,
                       this,
@@ -796,7 +803,12 @@ void TestLauncher::OnTestFinished(const TestResult& original_result) {
   if (result.status == TestResult::TEST_SUCCESS) {
     ++test_success_count_;
   } else {
-    tests_to_retry_.insert(result.full_name);
+    // Records prefix stripped name to run all dependent tests.
+    std::string test_name(result.full_name);
+    ReplaceSubstringsAfterOffset(&test_name, 0, kPreTestPrefix, std::string());
+    ReplaceSubstringsAfterOffset(&test_name, 0, kDisabledTestPrefix,
+                                 std::string());
+    tests_to_retry_.insert(test_name);
   }
 
   // There are no results for this tests,
@@ -998,38 +1010,6 @@ bool TestLauncher::Init(CommandLine* command_line) {
   force_run_broken_tests_ =
       command_line->HasSwitch(switches::kTestLauncherForceRunBrokenTests);
 
-  // Some of the TestLauncherDelegate implementations don't call into gtest
-  // until they've already split into test-specific processes. This results
-  // in gtest's native shuffle implementation attempting to shuffle one test.
-  // Shuffling the list of tests in the test launcher (before the delegate
-  // gets involved) ensures that the entire shard is shuffled.
-  if (command_line->HasSwitch(kGTestShuffleFlag)) {
-    shuffle_ = true;
-
-    if (command_line->HasSwitch(kGTestRandomSeedFlag)) {
-      const std::string custom_seed_str =
-          command_line->GetSwitchValueASCII(kGTestRandomSeedFlag);
-      uint32_t custom_seed = 0;
-      if (!StringToUint(custom_seed_str, &custom_seed)) {
-        LOG(ERROR) << "Unable to parse seed \"" << custom_seed_str << "\".";
-        return false;
-      }
-      if (custom_seed >= kRandomSeedUpperBound) {
-        LOG(ERROR) << "Seed " << custom_seed << " outside of expected range "
-                   << "[0, " << kRandomSeedUpperBound << ")";
-        return false;
-      }
-      shuffle_seed_ = custom_seed;
-    } else {
-      std::uniform_int_distribution<uint32_t> dist(0, kRandomSeedUpperBound);
-      std::random_device random_dev;
-      shuffle_seed_ = dist(random_dev);
-    }
-  } else if (command_line->HasSwitch(kGTestRandomSeedFlag)) {
-    LOG(ERROR) << kGTestRandomSeedFlag << " requires " << kGTestShuffleFlag;
-    return false;
-  }
-
   fprintf(stdout, "Using %zu parallel jobs.\n", parallel_jobs_);
   fflush(stdout);
@@ -1077,7 +1057,10 @@ bool TestLauncher::Init(CommandLine* command_line) {
   if (!InitTests())
     return false;
 
-  if (!ValidateTests())
+  if (!ShuffleTests(command_line))
+    return false;
+
+  if (!ReorderAndValidateTests())
     return false;
 
   if (command_line->HasSwitch(switches::kTestLauncherPrintTestStdio)) {
@@ -1207,19 +1190,56 @@ bool TestLauncher::InitTests() {
   return true;
 }
 
-bool TestLauncher::ValidateTests() {
+bool TestLauncher::ShuffleTests(CommandLine* command_line) {
+  if (command_line->HasSwitch(kGTestShuffleFlag)) {
+    uint32_t shuffle_seed;
+    if (command_line->HasSwitch(kGTestRandomSeedFlag)) {
+      const std::string custom_seed_str =
+          command_line->GetSwitchValueASCII(kGTestRandomSeedFlag);
+      uint32_t custom_seed = 0;
+      if (!StringToUint(custom_seed_str, &custom_seed)) {
+        LOG(ERROR) << "Unable to parse seed \"" << custom_seed_str << "\".";
+        return false;
+      }
+      if (custom_seed >= kRandomSeedUpperBound) {
+        LOG(ERROR) << "Seed " << custom_seed << " outside of expected range "
+                   << "[0, " << kRandomSeedUpperBound << ")";
+        return false;
+      }
+      shuffle_seed = custom_seed;
+    } else {
+      std::uniform_int_distribution<uint32_t> dist(0, kRandomSeedUpperBound);
+      std::random_device random_dev;
+      shuffle_seed = dist(random_dev);
+    }
+
+    std::mt19937 randomizer;
+    randomizer.seed(shuffle_seed);
+    std::shuffle(tests_.begin(), tests_.end(), randomizer);
+    fprintf(stdout, "Randomizing with seed %u\n", shuffle_seed);
+    fflush(stdout);
+  } else if (command_line->HasSwitch(kGTestRandomSeedFlag)) {
+    LOG(ERROR) << kGTestRandomSeedFlag << " requires " << kGTestShuffleFlag;
+    return false;
+  }
+  return true;
+}
+
+bool TestLauncher::ReorderAndValidateTests() {
   bool result = true;
   std::unordered_set<std::string> disabled_tests;
-  std::unordered_set<std::string> pre_tests;
+  std::unordered_map<std::string, TestInfo> pre_tests;
 
   // Find disabled and pre tests
   for (const TestInfo& test_info : tests_) {
     if (test_info.disabled())
       disabled_tests.insert(test_info.GetDisabledStrippedName());
     if (test_info.pre_test())
-      pre_tests.insert(test_info.GetDisabledStrippedName());
+      pre_tests[test_info.GetDisabledStrippedName()] = test_info;
   }
 
+  std::vector<TestInfo> tests_to_run;
   for (const TestInfo& test_info : tests_) {
     // If any test has a matching disabled test, fail and log for audit.
     if (base::ContainsKey(disabled_tests, test_info.GetFullName())) {
@@ -1227,13 +1247,26 @@ bool TestLauncher::ValidateTests() {
                  << " duplicated by a DISABLED_ test";
       result = false;
     }
-    // Remove pre tests that have a matching subsequent test.
-    pre_tests.erase(test_info.GetPreName());
+
+    // Passes on PRE tests, those will append when final test is found.
+    if (test_info.pre_test())
+      continue;
+
+    std::vector<TestInfo> test_sequence;
+    test_sequence.push_back(test_info);
+    // Move Pre Tests prior to final test in order.
+    while (base::ContainsKey(pre_tests, test_sequence.back().GetPreName())) {
+      test_sequence.push_back(pre_tests[test_sequence.back().GetPreName()]);
+      pre_tests.erase(test_sequence.back().GetDisabledStrippedName());
+    }
+
+    tests_to_run.insert(tests_to_run.end(), test_sequence.rbegin(),
+                        test_sequence.rend());
   }
+  tests_ = std::move(tests_to_run);
 
-  // If any tests remain in pre_tests set, fail and log for audit.
-  for (const std::string& pre_test : pre_tests) {
-    LOG(ERROR) << pre_test << " is an orphaned pre test";
+  // If any tests remain in |pre_tests| map, fail and log for audit.
+  for (const auto& i : pre_tests) {
+    LOG(ERROR) << i.first << " is an orphaned pre test";
     result = false;
   }
   return result;
@@ -1296,14 +1329,14 @@ void TestLauncher::RunTests() {
     // Count tests in the binary, before we apply filter and sharding.
     test_found_count++;
 
-    std::string test_name_no_disabled = test_info.GetDisabledStrippedName();
+    std::string prefix_stripped_name = test_info.GetPrefixStrippedName();
 
     // Skip the test that doesn't match the filter (if given).
    if (has_at_least_one_positive_filter_) {
       bool found = false;
       for (auto filter : positive_test_filter_) {
         if (MatchPattern(test_name, filter) ||
-            MatchPattern(test_name_no_disabled, filter)) {
+            MatchPattern(prefix_stripped_name, filter)) {
           found = true;
           break;
         }
@@ -1316,7 +1349,7 @@ void TestLauncher::RunTests() {
     bool excluded = false;
     for (auto filter : negative_test_filter_) {
       if (MatchPattern(test_name, filter) ||
-          MatchPattern(test_name_no_disabled, filter)) {
+          MatchPattern(prefix_stripped_name, filter)) {
         excluded = true;
         break;
       }
@@ -1328,14 +1361,7 @@ void TestLauncher::RunTests() {
     // Tests with the name XYZ will cause tests with the name PRE_XYZ to run. We
     // should bucket all of these tests together.
-    std::string test_name_to_bucket = test_name;
-    size_t index_of_first_period = test_name_to_bucket.find(".");
-    if (index_of_first_period == std::string::npos)
-      index_of_first_period = 0;
-    base::ReplaceSubstringsAfterOffset(
-        &test_name_to_bucket, index_of_first_period, "PRE_", std::string());
-
-    if (Hash(test_name_to_bucket) % total_shards_ !=
+    if (Hash(prefix_stripped_name) % total_shards_ !=
         static_cast<uint32_t>(shard_index_)) {
       continue;
     }
@@ -1344,25 +1370,13 @@ void TestLauncher::RunTests() {
     // locations only for those tests that were run as part of this shard.
     results_tracker_.AddTestLocation(test_name, test_info.file(),
                                      test_info.line());
     if (!test_info.pre_test()) {
-      bool should_run_test = launcher_delegate_->ShouldRunTest(
-          test_info.test_case_name(), test_info.test_name());
-      if (should_run_test) {
-        // Only a subset of tests that are run require placeholders -- namely,
-        // those that will output results.
-        results_tracker_.AddTestPlaceholder(test_name);
-
-        test_names.push_back(test_name);
-      }
+      // Only a subset of tests that are run require placeholders -- namely,
+      // those that will output results.
+      results_tracker_.AddTestPlaceholder(test_name);
     }
-  }
 
-  if (shuffle_) {
-    std::mt19937 randomizer;
-    randomizer.seed(shuffle_seed_);
-    std::shuffle(test_names.begin(), test_names.end(), randomizer);
-    fprintf(stdout, "Randomizing with seed %u\n", shuffle_seed_);
-    fflush(stdout);
+    test_names.push_back(test_name);
   }
 
   // Save an early test summary in case the launcher crashes or gets killed.
@@ -1381,8 +1395,12 @@ bool TestLauncher::RunRetryTests() {
   // Number of retries in this iteration.
   size_t retry_count = 0;
   while (!tests_to_retry_.empty() && retry_count < retry_limit_) {
-    std::vector<std::string> test_names(tests_to_retry_.begin(),
-                                        tests_to_retry_.end());
+    // Retry all tests that depend on a failing test.
+    std::vector<std::string> test_names;
+    for (const TestInfo& test_info : tests_) {
+      if (base::ContainsKey(tests_to_retry_, test_info.GetPrefixStrippedName()))
+        test_names.push_back(test_info.GetFullName());
+    }
     tests_to_retry_.clear();
 
     size_t retry_started_count =
...
@@ -57,17 +57,13 @@ class TestLauncherDelegate {
   virtual bool WillRunTest(const std::string& test_case_name,
                            const std::string& test_name) = 0;
 
-  // Called before a test is considered for running. This method must return
-  // true if the TestLauncher is expected to run the test, provided it is part
-  // of the current shard.
-  virtual bool ShouldRunTest(const std::string& test_case_name,
-                             const std::string& test_name) = 0;
-
   // Called to make the delegate run the specified tests. The delegate must
   // return the number of actual tests it's going to run (can be smaller,
   // equal to, or larger than size of |test_names|). It must also call
   // |test_launcher|'s OnTestFinished method once per every run test,
   // regardless of its success.
+  // If test_names contains PRE_ chained tests, they must be properly ordered
+  // and consecutive.
   virtual size_t RunTests(TestLauncher* test_launcher,
                           const std::vector<std::string>& test_names) = 0;
@@ -178,10 +174,18 @@ class TestLauncher {
   // Returns false if delegate fails to return tests.
   bool InitTests();
 
+  // Some of the TestLauncherDelegate implementations don't call into gtest
+  // until they've already split into test-specific processes. This results
+  // in gtest's native shuffle implementation attempting to shuffle one test.
+  // Shuffling the list of tests in the test launcher (before the delegate
+  // gets involved) ensures that the entire shard is shuffled.
+  bool ShuffleTests(CommandLine* command_line);
+
+  // Move all PRE_ tests prior to the final test in order.
   // Validate tests names. This includes no name conflict between tests
   // without DISABLED_ prefix, and orphaned PRE_ tests.
   // Returns false if any violation is found.
-  bool ValidateTests();
+  bool ReorderAndValidateTests();
 
   // Runs all tests in current iteration.
   void RunTests();
@@ -263,11 +267,7 @@ class TestLauncher {
   bool force_run_broken_tests_;
 
   // Tests to retry in this iteration.
-  std::set<std::string> tests_to_retry_;
-
-  // Support for test shuffling, just like gtest does.
-  bool shuffle_;
-  uint32_t shuffle_seed_;
+  std::unordered_set<std::string> tests_to_retry_;
 
   TestResultsTracker results_tracker_;
...
@@ -34,9 +34,6 @@ class MockTestLauncherDelegate : public TestLauncherDelegate {
   MOCK_METHOD2(WillRunTest,
                bool(const std::string& test_case_name,
                     const std::string& test_name));
-  MOCK_METHOD2(ShouldRunTest,
-               bool(const std::string& test_case_name,
-                    const std::string& test_name));
   MOCK_METHOD2(RunTests,
                size_t(TestLauncher* test_launcher,
                       const std::vector<std::string>& test_names));
@@ -77,8 +74,6 @@ class TestLauncherTest : public testing::Test {
                               testing::Return(true)));
     EXPECT_CALL(delegate, WillRunTest(_, _))
         .WillRepeatedly(testing::Return(true));
-    EXPECT_CALL(delegate, ShouldRunTest(_, _))
-        .WillRepeatedly(testing::Return(true));
   }
 
   std::unique_ptr<CommandLine> command_line;
@@ -148,6 +143,20 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
   EXPECT_TRUE(test_launcher.Run(command_line.get()));
 }
 
+// Test TestLauncher should reorder PRE_ tests before delegate
+TEST_F(TestLauncherTest, ReorderPreTests) {
+  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
+  SetUpExpectCalls();
+  using ::testing::_;
+  std::vector<std::string> tests_names = {
+      "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
+  EXPECT_CALL(delegate,
+              RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
+                                                    tests_names.cend())))
+      .WillOnce(testing::Return(0));
+  EXPECT_TRUE(test_launcher.Run(command_line.get()));
+}
+
 // Test TestLauncher "gtest_filter" switch.
 TEST_F(TestLauncherTest, UsingCommandLineFilter) {
   AddMockedTests("Test",
@@ -165,6 +174,21 @@ TEST_F(TestLauncherTest, UsingCommandLineFilter) {
   EXPECT_TRUE(test_launcher.Run(command_line.get()));
 }
 
+// Test TestLauncher gtest filter will include pre tests
+TEST_F(TestLauncherTest, FilterIncludePreTest) {
+  AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
+  SetUpExpectCalls();
+  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
+  using ::testing::_;
+  std::vector<std::string> tests_names = {"Test.PRE_firstTest",
+                                          "Test.firstTest"};
+  EXPECT_CALL(delegate,
+              RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
+                                                    tests_names.cend())))
+      .WillOnce(testing::Return(0));
+  EXPECT_TRUE(test_launcher.Run(command_line.get()));
+}
+
 // Test TestLauncher "gtest_repeat" switch.
 TEST_F(TestLauncherTest, RunningMultipleIterations) {
   AddMockedTests("Test", {"firstTest"});
@@ -221,6 +245,30 @@ TEST_F(TestLauncherTest, FailOnRetryTests) {
   EXPECT_FALSE(test_launcher.Run(command_line.get()));
 }
 
+// Test TestLauncher should retry all PRE_ chained tests
+TEST_F(TestLauncherTest, RetryPreTests) {
+  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
+  SetUpExpectCalls();
+  using ::testing::_;
+  EXPECT_CALL(delegate, RunTests(_, _))
+      .WillOnce(::testing::DoAll(
+          OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
+          OnTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
+          OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
+          testing::Return(3)));
+  std::vector<std::string> tests_names = {
+      "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
+  EXPECT_CALL(delegate,
+              RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(),
+                                                      tests_names.cend())))
+      .WillOnce(::testing::DoAll(
+          OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
+          OnTestResult("Test.PRE_firstTest", TestResult::TEST_SUCCESS),
+          OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
+          testing::Return(3)));
+  EXPECT_TRUE(test_launcher.Run(command_line.get()));
+}
+
 // Test TestLauncher run disabled unit tests switch.
 TEST_F(TestLauncherTest, RunDisabledTests) {
   AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
...
@@ -676,14 +676,6 @@ bool UnitTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
   return true;
 }
 
-bool UnitTestLauncherDelegate::ShouldRunTest(const std::string& test_case_name,
-                                             const std::string& test_name) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-
-  // There is no additional logic to disable specific tests.
-  return true;
-}
-
 size_t UnitTestLauncherDelegate::RunTests(
     TestLauncher* test_launcher,
     const std::vector<std::string>& test_names) {
...
@@ -136,8 +136,6 @@ class UnitTestLauncherDelegate : public TestLauncherDelegate {
   bool GetTests(std::vector<TestIdentifier>* output) override;
   bool WillRunTest(const std::string& test_case_name,
                    const std::string& test_name) override;
-  bool ShouldRunTest(const std::string& test_case_name,
-                     const std::string& test_name) override;
   size_t RunTests(TestLauncher* test_launcher,
                   const std::vector<std::string>& test_names) override;
   size_t RetryTests(TestLauncher* test_launcher,
...
@@ -143,8 +143,6 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
   bool GetTests(std::vector<base::TestIdentifier>* output) override;
   bool WillRunTest(const std::string& test_case_name,
                    const std::string& test_name) override;
-  bool ShouldRunTest(const std::string& test_case_name,
-                     const std::string& test_name) override;
   size_t RunTests(base::TestLauncher* test_launcher,
                   const std::vector<std::string>& test_names) override;
   size_t RetryTests(base::TestLauncher* test_launcher,
@@ -198,12 +196,6 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
   void DoRunTests(base::TestLauncher* test_launcher,
                   const std::vector<std::string>& test_names);
 
-  // Launches test named |test_name| using parallel launcher,
-  // given result of PRE_ test |pre_test_result|.
-  void RunDependentTest(base::TestLauncher* test_launcher,
-                        const std::string test_name,
-                        const base::TestResult& pre_test_result);
-
   // Relays timeout notification from the TestLauncher (by way of a
   // ProcessLifetimeObserver) to the caller's content::TestLauncherDelegate.
   void OnTestTimedOut(const base::CommandLine& command_line);
@@ -225,11 +217,6 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
   content::TestLauncherDelegate* launcher_delegate_;
 
-  // Store dependent test name (map is indexed by full test name).
-  typedef std::map<std::string, std::string> DependentTestMap;
-  DependentTestMap dependent_test_map_;
-  DependentTestMap reverse_dependent_test_map_;
-
   // Store unique data directory prefix for test names (without PRE_ prefixes).
   // PRE_ tests and tests that depend on them must share the same
   // data directory. Using test name as directory name leads to too long
@@ -239,9 +226,6 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
   typedef std::map<std::string, base::FilePath> UserDataDirMap;
   UserDataDirMap user_data_dir_map_;
 
-  // Store names of all seen tests to properly handle PRE_ tests.
-  std::set<std::string> all_test_names_;
-
   // Temporary directory for user data directories.
   base::ScopedTempDir temp_dir_;
@@ -255,14 +239,11 @@ bool WrapperTestLauncherDelegate::GetTests(
 }
 
 bool IsPreTestName(const std::string& test_name) {
-  return base::StartsWith(test_name, kPreTestPrefix,
-                          base::CompareCase::SENSITIVE);
+  return test_name.find(kPreTestPrefix) != std::string::npos;
 }
 
 bool WrapperTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
                                               const std::string& test_name) {
-  all_test_names_.insert(test_case_name + "." + test_name);
-
   if (base::StartsWith(test_name, kManualTestPrefix,
                        base::CompareCase::SENSITIVE) &&
       !base::CommandLine::ForCurrentProcess()->HasSwitch(kRunManualTestsFlag)) {
@@ -272,101 +253,32 @@ bool WrapperTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
   return true;
 }
 
-bool WrapperTestLauncherDelegate::ShouldRunTest(
-    const std::string& test_case_name,
-    const std::string& test_name) {
-  if (!WillRunTest(test_case_name, test_name))
-    return false;
-
-  if (IsPreTestName(test_name)) {
-    // We will actually run PRE_ tests, but to ensure they run on the same shard
-    // as dependent tests, handle all these details internally.
-    return false;
-  }
-
-  return true;
-}
-
-std::string GetPreTestName(const std::string& full_name) {
-  size_t dot_pos = full_name.find('.');
-  CHECK_NE(dot_pos, std::string::npos);
-  std::string test_case_name = full_name.substr(0, dot_pos);
-  std::string test_name = full_name.substr(dot_pos + 1);
-  return test_case_name + "." + kPreTestPrefix + test_name;
-}
-
 size_t WrapperTestLauncherDelegate::RunTests(
     base::TestLauncher* test_launcher,
     const std::vector<std::string>& test_names) {
-  dependent_test_map_.clear();
-  reverse_dependent_test_map_.clear();
   user_data_dir_map_.clear();
 
-  // Number of additional tests to run because of dependencies.
-  size_t additional_tests_to_run_count = 0;
-
-  // Compute dependencies of tests to be run.
-  for (const std::string& test_name : test_names) {
-    std::string full_name(test_name);
-    std::string pre_test_name(GetPreTestName(full_name));
-
-    while (base::ContainsKey(all_test_names_, pre_test_name)) {
-      additional_tests_to_run_count++;
-      DCHECK(!base::ContainsKey(dependent_test_map_, pre_test_name));
-      dependent_test_map_[pre_test_name] = full_name;
-      DCHECK(!base::ContainsKey(reverse_dependent_test_map_, full_name));
-      reverse_dependent_test_map_[full_name] = pre_test_name;
-      full_name = pre_test_name;
-      pre_test_name = GetPreTestName(pre_test_name);
-    }
-  }
-
+  std::vector<std::string> test_list;
   for (const std::string& test_name : test_names) {
-    std::string full_name(test_name);
-    // Make sure no PRE_ tests were requested explicitly.
-    DCHECK_EQ(full_name, RemoveAnyPrePrefixes(full_name));
-
-    if (!base::ContainsKey(user_data_dir_map_, full_name)) {
-      base::FilePath temp_dir;
-      CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
-                                          FILE_PATH_LITERAL("d"), &temp_dir));
-      user_data_dir_map_[full_name] = temp_dir;
-    }
-
-    // If the test has any dependencies, get to the root and start with that.
-    while (base::ContainsKey(reverse_dependent_test_map_, full_name))
-      full_name = GetPreTestName(full_name);
-
-    std::vector<std::string> test_list;
-    test_list.push_back(full_name);
-    DoRunTests(test_launcher, test_list);
+    // Stack all dependent tests and run them sequentially.
+    test_list.push_back(test_name);
+    if (!IsPreTestName(test_name)) {
+      if (!base::ContainsKey(user_data_dir_map_, test_name)) {
+        base::FilePath temp_dir;
+        CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
                                             FILE_PATH_LITERAL("d"), &temp_dir));
+        user_data_dir_map_[test_name] = temp_dir;
+      }
+      DoRunTests(test_launcher, test_list);
+      test_list.clear();
+    }
   }
 
-  return test_names.size() + additional_tests_to_run_count;
+  return test_names.size();
 }

 size_t WrapperTestLauncherDelegate::RetryTests(
     base::TestLauncher* test_launcher,
     const std::vector<std::string>& test_names) {
-  // List of tests we can kick off right now, depending on no other tests.
-  std::vector<std::string> tests_to_run_now;
-
-  // We retry at least the tests requested to retry.
-  std::set<std::string> test_names_set(test_names.begin(), test_names.end());
-
-  // In the face of PRE_ tests, we need to retry the entire chain of tests,
-  // from the very first one.
-  for (const std::string& test_name : test_names) {
-    std::string name(test_name);
-    while (base::ContainsKey(reverse_dependent_test_map_, name)) {
-      name = reverse_dependent_test_map_[name];
-      test_names_set.insert(name);
-    }
-  }
-
   // Discard user data directories from any previous runs. Start with
   // fresh state.
   for (const auto& it : user_data_dir_map_) {
@@ -377,7 +289,7 @@ size_t WrapperTestLauncherDelegate::RetryTests(
   }
   user_data_dir_map_.clear();
 
-  for (const std::string& full_name : test_names_set) {
+  for (const std::string& full_name : test_names) {
     // Make sure PRE_ tests and tests that depend on them share the same
     // data directory - based it on the test name without prefixes.
     std::string test_name_no_pre(RemoveAnyPrePrefixes(full_name));
@@ -387,15 +299,11 @@ size_t WrapperTestLauncherDelegate::RetryTests(
                                           FILE_PATH_LITERAL("d"), &temp_dir));
       user_data_dir_map_[test_name_no_pre] = temp_dir;
     }
-
-    std::string pre_test_name = GetPreTestName(full_name);
-    if (!base::ContainsKey(test_names_set, pre_test_name))
-      tests_to_run_now.push_back(full_name);
   }
 
-  DoRunTests(test_launcher, tests_to_run_now);
+  DoRunTests(test_launcher, test_names);
 
-  return test_names_set.size();
+  return test_names.size();
 }
 void WrapperTestLauncherDelegate::DoRunTests(
@@ -455,30 +363,6 @@ void WrapperTestLauncherDelegate::DoRunTests(
                            std::move(observer));
 }
 
-void WrapperTestLauncherDelegate::RunDependentTest(
-    base::TestLauncher* test_launcher,
-    const std::string test_name,
-    const base::TestResult& pre_test_result) {
-  if (pre_test_result.status == base::TestResult::TEST_SUCCESS) {
-    // Only run the dependent test if PRE_ test succeeded.
-    std::vector<std::string> test_list;
-    test_list.push_back(test_name);
-    DoRunTests(test_launcher, test_list);
-  } else {
-    // Otherwise mark the test as a failure.
-    base::TestResult test_result;
-    test_result.full_name = test_name;
-    test_result.status = base::TestResult::TEST_FAILURE;
-    test_launcher->OnTestFinished(test_result);
-
-    if (base::ContainsKey(dependent_test_map_, test_name)) {
-      RunDependentTest(test_launcher,
-                       dependent_test_map_[test_name],
-                       test_result);
-    }
-  }
-}
-
 void WrapperTestLauncherDelegate::OnTestTimedOut(
     const base::CommandLine& command_line) {
   launcher_delegate_->OnTestTimedOut(command_line);
@@ -543,19 +427,14 @@ void WrapperTestLauncherDelegate::GTestCallback(
   launcher_delegate_->PostRunTest(&result);
 
-  if (base::ContainsKey(dependent_test_map_, test_name)) {
-    RunDependentTest(test_launcher, dependent_test_map_[test_name], result);
-  } else {
-    // No other tests depend on this, we can delete the temporary directory now.
-    // Do so to avoid too many temporary files using lots of disk space.
-    std::string test_name_no_pre(RemoveAnyPrePrefixes(test_name));
-    if (base::ContainsKey(user_data_dir_map_, test_name_no_pre)) {
-      if (!base::DeleteFile(user_data_dir_map_[test_name_no_pre], true)) {
-        LOG(WARNING) << "Failed to delete "
-                     << user_data_dir_map_[test_name_no_pre].value();
-      }
-      user_data_dir_map_.erase(test_name_no_pre);
-    }
+  // No other tests depend on this, we can delete the temporary directory now.
+  // Do so to avoid too many temporary files using lots of disk space.
+  if (base::ContainsKey(user_data_dir_map_, test_name)) {
+    if (!base::DeleteFile(user_data_dir_map_[test_name], true)) {
+      LOG(WARNING) << "Failed to delete "
+                   << user_data_dir_map_[test_name].value();
+    }
+    user_data_dir_map_.erase(test_name);
   }
 
   test_launcher->OnTestFinished(result);
...