Commit bb2dfc38 authored by Ilia Samsonov's avatar Ilia Samsonov Committed by Commit Bot

Removed ProcessLifetimeObserver from TestLauncher.

The goal of this cl is to move all child process logic to TestLauncher.
This should simplify the gtest launcher structure and clarify
each class's responsibilities.

TestRunner controls running test processes across sequence runners.

TestLauncherDelegate is now limited to providing test-specific needs:
the command line for tests, the timeout, result processing, etc.

This allows us to remove the ProcessLifetimeObserver and its extending
classes.

The original change has been reverted since result processing
was not thread safe.

Bug: 936248,848465
Change-Id: I2468cf2e92901c7f2c4a6f42838be219397a7b0a
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1721429
Commit-Queue: Ilia Samsonov <isamsonov@google.com>
Reviewed-by: default avatarScott Violet <sky@chromium.org>
Reviewed-by: default avatarErik Chen <erikchen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#682916}
parent 3e858960
......@@ -268,7 +268,7 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
const LaunchOptions& options,
int flags,
TimeDelta timeout,
ProcessLifetimeObserver* observer,
TestLauncherDelegate* delegate,
bool* was_timeout) {
TimeTicks start_time(TimeTicks::Now());
......@@ -410,8 +410,8 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
}
if (!did_exit) {
if (observer)
observer->OnTimedOut(command_line);
if (delegate)
delegate->OnTestTimedOut(command_line);
*was_timeout = true;
exit_code = -1; // Set a non-zero exit code to signal a failure.
......@@ -454,15 +454,29 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
return exit_code;
}
void DoLaunchChildTestProcess(
// Aggregated outcome of one child test process, as produced by
// DoLaunchChildTestProcess.
struct ChildProcessResults {
  // Total time for DoLaunchChildTestProcess to execute.
  TimeDelta elapsed_time;
  // If stdio is redirected, holds the captured output file content.
  std::string output_file_contents;
  // True if the child process timed out.
  bool was_timeout = false;
  // Exit code of the child process. Default-initialized to 0 so the value
  // is never read uninitialized if a caller inspects it before launch.
  int exit_code = 0;
};
// This launches the child test process, waits for it to complete,
// and returns child process results.
ChildProcessResults DoLaunchChildTestProcess(
const CommandLine& command_line,
TimeDelta timeout,
const TestLauncher::LaunchOptions& test_launch_options,
bool redirect_stdio,
SingleThreadTaskRunner* task_runner,
std::unique_ptr<ProcessLifetimeObserver> observer) {
TestLauncherDelegate* delegate) {
TimeTicks start_time = TimeTicks::Now();
ChildProcessResults result;
ScopedFILE output_file;
FilePath output_filename;
if (redirect_stdio) {
......@@ -512,34 +526,26 @@ void DoLaunchChildTestProcess(
#endif // !defined(OS_WIN)
bool was_timeout = false;
int exit_code = LaunchChildTestProcessWithOptions(
command_line, options, test_launch_options.flags, timeout, observer.get(),
&was_timeout);
result.exit_code = LaunchChildTestProcessWithOptions(
command_line, options, test_launch_options.flags, timeout, delegate,
&result.was_timeout);
std::string output_file_contents;
if (redirect_stdio) {
fflush(output_file.get());
output_file.reset();
// Reading the file can sometimes fail when the process was killed midflight
// (e.g. on test suite timeout): https://crbug.com/826408. Attempt to read
// the output file anyways, but do not crash on failure in this case.
CHECK(ReadFileToString(output_filename, &output_file_contents) ||
exit_code != 0);
CHECK(ReadFileToString(output_filename, &result.output_file_contents) ||
result.exit_code != 0);
if (!DeleteFile(output_filename, false)) {
// This needs to be non-fatal at least for Windows.
LOG(WARNING) << "Failed to delete " << output_filename.AsUTF8Unsafe();
}
}
// Invoke OnCompleted on the thread it was originating from, not on a worker
// pool thread.
task_runner->PostTask(
FROM_HERE,
BindOnce(&ProcessLifetimeObserver::OnCompleted, std::move(observer),
exit_code, TimeTicks::Now() - start_time, was_timeout,
output_file_contents));
result.elapsed_time = TimeTicks::Now() - start_time;
return result;
}
std::vector<std::string> ExtractTestsFromFilter(const std::string& filter,
......@@ -556,6 +562,122 @@ std::vector<std::string> ExtractTestsFromFilter(const std::string& filter,
return tests;
}
// A test runner object to run tests across a number of sequence runners,
// and control running pre tests in sequence.
class TestRunner {
public:
// |launcher| must outlive this object. |runner_count| sequenced task
// runners are created by Run(); each child process runs |batch_size|
// tests (0 is a special case meaning "all remaining tests in one batch").
explicit TestRunner(TestLauncher* launcher,
size_t runner_count = 1u,
size_t batch_size = 1u)
: launcher_(launcher),
runner_count_(runner_count),
batch_size_(batch_size),
weak_ptr_factory_(this) {}
// Sets |test_names| to be run, with |batch_size| tests per process.
// Posts LaunchNextTask |runner_count| number of times, each with a separate
// task runner. Blocks (via an internal RunLoop) until all runners finish.
void Run(const std::vector<std::string>& test_names);
private:
// Called to check if the next batch has to run on the same
// sequence task runner and using the same temporary directory.
// True when the batch is a single test whose name contains the PRE_
// prefix (i.e. a chained pre-test whose successor needs shared state).
bool ShouldReuseStateFromLastBatch(
const std::vector<std::string>& test_names) {
return test_names.size() == 1u &&
test_names.front().find(kPreTestPrefix) != std::string::npos;
}
// Launch next child process on |task_runner|,
// and clear |temp_dir| from previous process.
// Must be called on the thread that created this object.
void LaunchNextTask(scoped_refptr<TaskRunner> task_runner, FilePath temp_dir);
// Forward |temp_dir| and Launch next task on main thread.
// The method is called on |task_runner|; it bounces back to the main
// thread via a weak pointer so a destroyed TestRunner is never touched.
void ClearAndLaunchNext(scoped_refptr<TaskRunner> main_thread_runner,
scoped_refptr<TaskRunner> task_runner,
const FilePath& temp_dir) {
main_thread_runner->PostTask(
FROM_HERE,
BindOnce(&TestRunner::LaunchNextTask, weak_ptr_factory_.GetWeakPtr(),
task_runner, temp_dir));
}
// Guards against calling main-thread-only methods from worker sequences.
ThreadChecker thread_checker_;
// Remaining tests, stored in reverse order so batches pop off the back.
std::vector<std::string> tests_to_run_;
TestLauncher* const launcher_;
std::vector<scoped_refptr<TaskRunner>> task_runners_;
// Number of sequenced task runners to use.
const size_t runner_count_;
// Number of TaskRunners that have finished.
size_t runners_done_ = 0;
// Number of tests per process, 0 is special case for all tests.
const size_t batch_size_;
// Quit when all runners are done; pumps tasks while tests execute.
RunLoop run_loop_;
// Must be the last member so weak pointers are invalidated first.
base::WeakPtrFactory<TestRunner> weak_ptr_factory_;
};
void TestRunner::Run(const std::vector<std::string>& test_names) {
DCHECK(thread_checker_.CalledOnValidThread());
// No sequence runners, fail immediately.
CHECK_GT(runner_count_, 0u);
tests_to_run_ = test_names;
// Reverse test order to avoid coping the whole vector when removing tests.
std::reverse(tests_to_run_.begin(), tests_to_run_.end());
runners_done_ = 0;
task_runners_.clear();
for (size_t i = 0; i < runner_count_; i++) {
task_runners_.push_back(CreateSequencedTaskRunnerWithTraits(
{MayBlock(), TaskShutdownBehavior::BLOCK_SHUTDOWN}));
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&TestRunner::LaunchNextTask, weak_ptr_factory_.GetWeakPtr(),
task_runners_.back(), FilePath()));
}
run_loop_.Run();
}
// Cleans up the previous batch's |temp_dir|, then either finishes this
// sequence (when no tests remain) or posts the next batch(es) of tests to
// |task_runner|. Runs on the main thread.
void TestRunner::LaunchNextTask(scoped_refptr<TaskRunner> task_runner,
FilePath temp_dir) {
DCHECK(thread_checker_.CalledOnValidThread());
// Delete the temporary directory left over from the previous child process.
if (!temp_dir.empty() && !DeleteFile(temp_dir, false)) {
// This needs to be non-fatal at least for Windows.
LOG(WARNING) << "Failed to delete " << temp_dir.AsUTF8Unsafe();
}
// No more tests to run, finish sequence.
if (tests_to_run_.empty()) {
runners_done_++;
// All sequence runners are done, quit the loop.
if (runners_done_ == runner_count_)
run_loop_.QuitWhenIdle();
return;
}
// Fresh temp dir for the next batch. NOTE(review): the return value is
// ignored here — on failure |temp_dir| presumably stays empty; confirm
// whether that should be fatal.
CreateNewTempDirectory(FilePath::StringType(), &temp_dir);
bool post_to_current_runner = true;
// batch_size_ == 0 means "run everything remaining in one process".
size_t batch_size = (batch_size_ == 0) ? tests_to_run_.size() : batch_size_;
// Keep posting batches to the same runner (and same temp dir) while the
// previous batch was a chained PRE_ test that the next batch depends on.
while (post_to_current_runner && !tests_to_run_.empty()) {
batch_size = std::min(batch_size, tests_to_run_.size());
// Tests are stored reversed, so the next batch is taken from the back
// (in original order via rbegin) and erased from the end.
std::vector<std::string> batch(tests_to_run_.rbegin(),
tests_to_run_.rbegin() + batch_size);
tests_to_run_.erase(tests_to_run_.end() - batch_size, tests_to_run_.end());
task_runner->PostTask(
FROM_HERE,
BindOnce(&TestLauncher::LaunchChildGTestProcess, Unretained(launcher_),
ThreadTaskRunnerHandle::Get(), batch, temp_dir));
post_to_current_runner = ShouldReuseStateFromLastBatch(batch);
}
// After the posted batches finish, bounce back to the main thread to
// clean up |temp_dir| and launch the next batch on this sequence.
task_runner->PostTask(
FROM_HERE,
BindOnce(&TestRunner::ClearAndLaunchNext, Unretained(this),
ThreadTaskRunnerHandle::Get(), task_runner, temp_dir));
}
} // namespace
const char kGTestBreakOnFailure[] = "gtest_break_on_failure";
......@@ -740,23 +862,44 @@ bool TestLauncher::Run(CommandLine* command_line) {
}
void TestLauncher::LaunchChildGTestProcess(
const CommandLine& command_line,
const std::string& wrapper,
TimeDelta timeout,
const LaunchOptions& options,
std::unique_ptr<ProcessLifetimeObserver> observer) {
DCHECK(thread_checker_.CalledOnValidThread());
scoped_refptr<TaskRunner> task_runner,
const std::vector<std::string>& test_names,
const FilePath& temp_dir) {
FilePath result_file;
CommandLine cmd_line =
launcher_delegate_->GetCommandLine(test_names, temp_dir, &result_file);
// Record the exact command line used to launch the child.
CommandLine new_command_line(
PrepareCommandLineForGTest(command_line, wrapper));
PrepareCommandLineForGTest(cmd_line, launcher_delegate_->GetWrapper()));
LaunchOptions options;
options.flags = launcher_delegate_->GetLaunchOptions();
ChildProcessResults process_results = DoLaunchChildTestProcess(
new_command_line, launcher_delegate_->GetTimeout() * test_names.size(),
options, redirect_stdio_, launcher_delegate_);
PostTask(
// Invoke ProcessTestResults on the original thread, not
// on a worker pool thread.
task_runner->PostTask(
FROM_HERE,
{ThreadPool(), MayBlock(), TaskShutdownBehavior::BLOCK_SHUTDOWN},
BindOnce(&DoLaunchChildTestProcess, new_command_line, timeout, options,
redirect_stdio_, RetainedRef(ThreadTaskRunnerHandle::Get()),
std::move(observer)));
BindOnce(&TestLauncher::ProcessTestResults, Unretained(this), test_names,
result_file, process_results.output_file_contents,
process_results.elapsed_time, process_results.exit_code,
process_results.was_timeout));
}
// Translates one child process's raw outcome into per-test results via the
// delegate, then reports each result through OnTestFinished. Must run on
// the launcher's main thread.
void TestLauncher::ProcessTestResults(
    const std::vector<std::string>& test_names,
    const base::FilePath& result_file,
    const std::string& output,
    const base::TimeDelta& elapsed_time,
    int exit_code,
    bool was_timeout) {
  const std::vector<TestResult> results =
      launcher_delegate_->ProcessTestResults(test_names, result_file, output,
                                             elapsed_time, exit_code,
                                             was_timeout);
  for (const TestResult& test_result : results)
    OnTestFinished(test_result);
}
void TestLauncher::OnTestFinished(const TestResult& original_result) {
......@@ -859,8 +1002,6 @@ void TestLauncher::OnTestFinished(const TestResult& original_result) {
exit(1);
}
if (test_finished_count_ == test_started_count_)
RunLoop::QuitCurrentWhenIdleDeprecated();
}
// Helper used to parse test filter files. Syntax is documented in
......@@ -1385,10 +1526,11 @@ void TestLauncher::RunTests() {
broken_threshold_ = std::max(static_cast<size_t>(20), test_found_count / 10);
test_started_count_ = launcher_delegate_->RunTests(this, test_names);
test_started_count_ = test_names.size();
if (test_started_count_ > 0)
RunLoop().Run();
TestRunner test_runner(this, parallel_jobs_,
launcher_delegate_->GetBatchSize());
test_runner.Run(test_names);
}
bool TestLauncher::RunRetryTests() {
......@@ -1403,9 +1545,7 @@ bool TestLauncher::RunRetryTests() {
}
tests_to_retry_.clear();
size_t retry_started_count =
launcher_delegate_->RetryTests(this, test_names);
size_t retry_started_count = test_names.size();
test_started_count_ += retry_started_count;
// Only invoke RunLoop if there are any tasks to run.
......@@ -1416,8 +1556,8 @@ bool TestLauncher::RunRetryTests() {
retry_started_count > 1 ? "s" : "", retry_count);
fflush(stdout);
RunLoop().Run();
TestRunner test_runner(this);
test_runner.Run(test_names);
retry_count++;
}
return tests_to_retry_.empty();
......
......@@ -13,6 +13,7 @@
#include <string>
#include <vector>
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/process/launch.h"
......@@ -26,10 +27,6 @@
namespace base {
class CommandLine;
struct LaunchOptions;
class TestLauncher;
// Constants for GTest command-line flags.
extern const char kGTestFilterFlag[];
extern const char kGTestFlagfileFlag[];
......@@ -57,53 +54,47 @@ class TestLauncherDelegate {
virtual bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) = 0;
// Called to make the delegate run the specified tests. The delegate must
// return the number of actual tests it's going to run (can be smaller,
// equal to, or larger than size of |test_names|). It must also call
// |test_launcher|'s OnTestFinished method once per every run test,
// regardless of its success.
// If test_names contains PRE_ chained tests, they must be properly ordered
// and consecutive.
virtual size_t RunTests(TestLauncher* test_launcher,
const std::vector<std::string>& test_names) = 0;
// Called to make the delegate retry the specified tests. The delegate must
// return the number of actual tests it's going to retry (can be smaller,
// equal to, or larger than size of |test_names|). It must also call
// |test_launcher|'s OnTestFinished method once per every retried test,
// regardless of its success.
virtual size_t RetryTests(TestLauncher* test_launcher,
const std::vector<std::string>& test_names) = 0;
virtual ~TestLauncherDelegate();
};
// An observer of child process lifetime events generated by
// LaunchChildGTestProcess.
class ProcessLifetimeObserver {
public:
virtual ~ProcessLifetimeObserver() = default;
// Invoked after a child process finishes, reporting the process |exit_code|,
// child process |elapsed_time|, whether or not the process was terminated as
// a result of a timeout, and the output of the child (stdout and stderr
// together). NOTE: this method is invoked on the main thread.
// Returns test results of child process.
virtual std::vector<TestResult> ProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) = 0;
// Called to get the command line for the specified tests.
// |output_file_| is populated with the path to the result file, and must
// be inside |temp_dir|.
virtual CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir,
FilePath* output_file) = 0;
// Invoked when a test process exceeds its runtime, immediately before it is
// terminated. |command_line| is the command line used to launch the process.
// NOTE: this method is invoked on the thread the process is launched on.
virtual void OnTimedOut(const CommandLine& command_line) {}
virtual void OnTestTimedOut(const base::CommandLine& cmd_line) {}
// Invoked after a child process finishes, reporting the process |exit_code|,
// child process |elapsed_time|, whether or not the process was terminated as
// a result of a timeout, and the output of the child (stdout and stderr
// together). NOTE: this method is invoked on the same thread as
// LaunchChildGTestProcess.
virtual void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {}
// Returns the delegate specific wrapper for command line.
// If it is not empty, it is prepended to the final command line.
virtual std::string GetWrapper() = 0;
protected:
ProcessLifetimeObserver() = default;
// Returns the delegate specific flags for launch options.
// The flags are specified in LaunchChildGTestProcessFlags.
virtual int GetLaunchOptions() = 0;
private:
DISALLOW_COPY_AND_ASSIGN(ProcessLifetimeObserver);
// Returns the delegate specific timeout per test.
virtual TimeDelta GetTimeout() = 0;
// Returns the delegate specific batch size.
virtual size_t GetBatchSize() = 0;
protected:
virtual ~TestLauncherDelegate();
};
// Launches tests using a TestLauncherDelegate.
......@@ -151,18 +142,16 @@ class TestLauncher {
// if null, uses command line for current process.
bool Run(CommandLine* command_line = nullptr) WARN_UNUSED_RESULT;
// Launches a child process (assumed to be gtest-based binary) using
// |command_line|. If |wrapper| is not empty, it is prepended to the final
// command line. |observer|, if not null, is used to convey process lifetime
// events to the caller. |observer| is destroyed after its OnCompleted
// method is invoked.
// Launches a child process (assumed to be gtest-based binary) which runs
// tests indicated by |test_names|.
// |task_runner| is used to post results back to the launcher
// on the main thread. |temp_dir| is used for child process files,
// such as user data, result file, and flag_file.
// virtual to mock in testing.
virtual void LaunchChildGTestProcess(
const CommandLine& command_line,
const std::string& wrapper,
TimeDelta timeout,
const LaunchOptions& options,
std::unique_ptr<ProcessLifetimeObserver> observer);
scoped_refptr<TaskRunner> task_runner,
const std::vector<std::string>& test_names,
const FilePath& temp_dir);
// Called when a test has finished running.
void OnTestFinished(const TestResult& result);
......@@ -220,6 +209,13 @@ class TestLauncher {
// wait for child processes). virtual to mock in testing.
virtual void CreateAndStartThreadPool(int num_parallel_jobs);
void ProcessTestResults(const std::vector<std::string>& test_names,
const base::FilePath& result_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout);
// Make sure we don't accidentally call the wrong methods e.g. on the worker
// pool thread. Should be the first member so that it's destroyed last: when
// destroying other members, especially the worker pool, we may check the code
......
......@@ -80,14 +80,19 @@ class NonSfiUnitTestPlatformDelegate : public base::UnitTestPlatformDelegate {
}
private:
bool CreateResultsFile(base::FilePath* path) override {
if (!base::CreateNewTempDirectory(base::FilePath::StringType(), path))
bool CreateResultsFile(const base::FilePath& temp_dir,
base::FilePath* path) override {
if (!base::CreateTemporaryDirInDir(temp_dir, base::FilePath::StringType(),
path))
return false;
*path = path->AppendASCII("test_results.xml");
return true;
}
bool CreateTemporaryFile(base::FilePath* path) override { return false; }
bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) override {
return false;
}
bool GetTests(std::vector<base::TestIdentifier>* output) override {
base::FilePath output_file;
......
......@@ -10,9 +10,11 @@
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/json/json_reader.h"
#include "base/json/json_writer.h"
#include "base/test/launcher/test_launcher.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/scoped_task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
......@@ -56,6 +58,11 @@ class MockTestLauncher : public TestLauncher {
: TestLauncher(launcher_delegate, parallel_jobs) {}
void CreateAndStartThreadPool(int parallel_jobs) override {}
MOCK_METHOD3(LaunchChildGTestProcess,
void(scoped_refptr<TaskRunner> task_runner,
const std::vector<std::string>& test_names,
const FilePath& temp_dir));
};
// Simple TestLauncherDelegate mock to test TestLauncher flow.
......@@ -65,12 +72,23 @@ class MockTestLauncherDelegate : public TestLauncherDelegate {
MOCK_METHOD2(WillRunTest,
bool(const std::string& test_case_name,
const std::string& test_name));
MOCK_METHOD2(RunTests,
size_t(TestLauncher* test_launcher,
const std::vector<std::string>& test_names));
MOCK_METHOD2(RetryTests,
size_t(TestLauncher* test_launcher,
const std::vector<std::string>& test_names));
MOCK_METHOD6(
ProcessTestResults,
std::vector<TestResult>(const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout));
MOCK_METHOD3(GetCommandLine,
CommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir_,
FilePath* output_file_));
MOCK_METHOD1(IsPreTask, bool(const std::vector<std::string>& test_names));
MOCK_METHOD0(GetWrapper, std::string());
MOCK_METHOD0(GetLaunchOptions, int());
MOCK_METHOD0(GetTimeout, TimeDelta());
MOCK_METHOD0(GetBatchSize, size_t());
};
// Using MockTestLauncher to test TestLauncher.
......@@ -98,13 +116,25 @@ class TestLauncherTest : public testing::Test {
}
// Setup expected delegate calls, and which tests the delegate will return.
void SetUpExpectCalls() {
void SetUpExpectCalls(size_t batch_size = 10) {
using ::testing::_;
EXPECT_CALL(delegate, GetTests(_))
.WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_),
testing::Return(true)));
EXPECT_CALL(delegate, WillRunTest(_, _))
.WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, ProcessTestResults(_, _, _, _, _, _)).Times(0);
EXPECT_CALL(delegate, GetCommandLine(_, _, _))
.WillRepeatedly(testing::Return(CommandLine(CommandLine::NO_PROGRAM)));
EXPECT_CALL(delegate, GetWrapper())
.WillRepeatedly(testing::Return(std::string()));
EXPECT_CALL(delegate, IsPreTask(_)).WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, GetLaunchOptions())
.WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, GetTimeout())
.WillRepeatedly(testing::Return(TimeDelta()));
EXPECT_CALL(delegate, GetBatchSize())
.WillRepeatedly(testing::Return(batch_size));
}
void ReadSummary(FilePath path) {
......@@ -127,18 +157,16 @@ class TestLauncherTest : public testing::Test {
};
// Action to mock delegate invoking OnTestFinish on test launcher.
ACTION_P2(OnTestResult, full_name, status) {
ACTION_P3(OnTestResult, launcher, full_name, status) {
TestResult result = GenerateTestResult(full_name, status);
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&TestLauncher::OnTestFinished, Unretained(arg0), result));
arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
Unretained(launcher), result));
}
// Action to mock delegate invoking OnTestFinish on test launcher.
ACTION_P(OnTestResult, result) {
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&TestLauncher::OnTestFinished, Unretained(arg0), result));
ACTION_P2(OnTestResult, launcher, result) {
arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
Unretained(launcher), result));
}
// A test and a disabled test cannot share a name.
......@@ -163,11 +191,11 @@ TEST_F(TestLauncherTest, OrphanePreTest) {
EXPECT_FALSE(test_launcher.Run(command_line.get()));
}
// When There are no tests, RunLoop should not be called.
// When There are no tests, delegate should not be called.
TEST_F(TestLauncherTest, EmptyTestSetPasses) {
SetUpExpectCalls();
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)).WillOnce(testing::Return(0));
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _)).Times(0);
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -177,15 +205,17 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
AddMockedTests("Test",
{"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
SetUpExpectCalls();
using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
OnTestResult("Test.secondTest", TestResult::TEST_SUCCESS),
testing::Return(2)));
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS),
OnTestResult(&test_launcher, "Test.secondTest",
TestResult::TEST_SUCCESS)));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -193,13 +223,15 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
TEST_F(TestLauncherTest, ReorderPreTests) {
AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
SetUpExpectCalls();
using ::testing::_;
std::vector<std::string> tests_names = {
"Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(testing::Return(0));
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -211,12 +243,14 @@ TEST_F(TestLauncherTest, UsingCommandLineFilter) {
command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(1)));
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -225,13 +259,15 @@ TEST_F(TestLauncherTest, FilterIncludePreTest) {
AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
SetUpExpectCalls();
command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
using ::testing::_;
std::vector<std::string> tests_names = {"Test.PRE_firstTest",
"Test.firstTest"};
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(testing::Return(0));
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -241,11 +277,10 @@ TEST_F(TestLauncherTest, RunningMultipleIterations) {
SetUpExpectCalls();
command_line->AppendSwitchASCII("gtest_repeat", "2");
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.Times(2)
.WillRepeatedly(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(1)));
.WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -254,17 +289,16 @@ TEST_F(TestLauncherTest, SuccessOnRetryTests) {
AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls();
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE),
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate,
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(1)));
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_FAILURE))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -274,18 +308,15 @@ TEST_F(TestLauncherTest, FailOnRetryTests) {
AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls();
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE),
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate,
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.Times(3)
.WillRepeatedly(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE),
testing::Return(1)));
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(4)
.WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_FAILURE));
EXPECT_FALSE(test_launcher.Run(command_line.get()));
}
......@@ -293,23 +324,43 @@ TEST_F(TestLauncherTest, FailOnRetryTests) {
TEST_F(TestLauncherTest, RetryPreTests) {
AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
SetUpExpectCalls();
std::vector<TestResult> results = {
GenerateTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
GenerateTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS)};
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
OnTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(3)));
std::vector<std::string> tests_names = {
"Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
EXPECT_CALL(delegate,
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
OnTestResult("Test.PRE_firstTest", TestResult::TEST_SUCCESS),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(3)));
OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
TestResult::TEST_SUCCESS),
OnTestResult(&test_launcher, "Test.PRE_firstTest",
TestResult::TEST_FAILURE),
OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS)));
std::vector<std::string> tests_names = {"Test.PRE_PRE_firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
TestResult::TEST_SUCCESS));
tests_names = {"Test.PRE_firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.PRE_firstTest",
TestResult::TEST_SUCCESS));
tests_names = {"Test.firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -321,34 +372,38 @@ TEST_F(TestLauncherTest, RunDisabledTests) {
SetUpExpectCalls();
command_line->AppendSwitch("gtest_also_run_disabled_tests");
command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
using ::testing::_;
std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
"Test.firstTest",
"Test.DISABLED_firstTestDisabled"};
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
using ::testing::_;
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
OnTestResult("DISABLED_TestDisabled.firstTest",
OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS),
OnTestResult("Test.DISABLED_firstTestDisabled",
OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
TestResult::TEST_SUCCESS),
testing::Return(3)));
OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
TestResult::TEST_SUCCESS)));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
// Disabled test should disable all pre tests
TEST_F(TestLauncherTest, DisablePreTests) {
AddMockedTests("Test",
{"DISABLED_firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
AddMockedTests("Test", {"DISABLED_firstTest", "PRE_PRE_firstTest",
"PRE_firstTest", "secondTest"});
SetUpExpectCalls();
std::vector<std::string> tests_names = {"Test.secondTest"};
using ::testing::_;
std::vector<std::string> tests_names;
EXPECT_CALL(delegate,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(testing::Return(0));
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -356,21 +411,18 @@ TEST_F(TestLauncherTest, DisablePreTests) {
TEST_F(TestLauncherTest, FaultyShardSetup) {
command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
command_line->AppendSwitchASCII("test-launcher-shard-index", "2");
using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_FALSE(test_launcher.Run(command_line.get()));
}
// Shard index must be lesser than total shards
TEST_F(TestLauncherTest, RedirectStdio) {
AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls();
command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always");
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
}
......@@ -463,11 +515,11 @@ TEST_F(TestLauncherTest, JsonSummary) {
TimeDelta::FromMilliseconds(50), "output_second");
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.Times(2)
.WillRepeatedly(::testing::DoAll(OnTestResult(first_result),
OnTestResult(second_result),
testing::Return(2)));
.WillRepeatedly(
::testing::DoAll(OnTestResult(&test_launcher, first_result),
OnTestResult(&test_launcher, second_result)));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
// Validate the resulting JSON file is the expected output.
......@@ -514,9 +566,8 @@ TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) {
TimeDelta::FromMilliseconds(50), "output_second");
using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(
::testing::DoAll(OnTestResult(test_result), testing::Return(1)));
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce(OnTestResult(&test_launcher, test_result));
EXPECT_TRUE(test_launcher.Run(command_line.get()));
// Validate the resulting JSON file is the expected output.
......
......@@ -254,24 +254,21 @@ std::vector<TestResult> ProcessMissingTestResults(
return results;
}
// Interprets test results and reports to the test launcher.
void ProcessTestResults(TestLauncher* test_launcher,
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
int exit_code,
bool was_timeout) {
// Returns interpreted test results.
std::vector<TestResult> UnitTestProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
int exit_code,
bool was_timeout) {
std::vector<TestResult> test_results;
bool crashed = false;
bool have_test_results =
ProcessGTestOutput(output_file, &test_results, &crashed);
if (!have_test_results) {
test_results =
ProcessMissingTestResults(test_names, output, was_timeout, exit_code);
for (auto& test_result : test_results)
test_launcher->OnTestFinished(test_result);
return;
return ProcessMissingTestResults(test_names, output, was_timeout,
exit_code);
}
// TODO(phajdan.jr): Check for duplicates and mismatches between
......@@ -340,145 +337,8 @@ void ProcessTestResults(TestLauncher* test_launcher,
for (auto& i : final_results) {
// Fix the output snippet after possible changes to the test result.
i.output_snippet = GetTestOutputSnippet(i, output);
test_launcher->OnTestFinished(i);
}
}
class UnitTestProcessLifetimeObserver : public ProcessLifetimeObserver {
public:
~UnitTestProcessLifetimeObserver() override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
TestLauncher* test_launcher() { return test_launcher_; }
UnitTestPlatformDelegate* platform_delegate() { return platform_delegate_; }
const std::vector<std::string>& test_names() { return test_names_; }
int launch_flags() { return launch_flags_; }
const FilePath& output_file() { return output_file_; }
const FilePath& flag_file() { return flag_file_; }
protected:
UnitTestProcessLifetimeObserver(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file)
: ProcessLifetimeObserver(),
test_launcher_(test_launcher),
platform_delegate_(platform_delegate),
test_names_(test_names),
launch_flags_(launch_flags),
output_file_(output_file),
flag_file_(flag_file) {}
SEQUENCE_CHECKER(sequence_checker_);
private:
TestLauncher* const test_launcher_;
UnitTestPlatformDelegate* const platform_delegate_;
const std::vector<std::string> test_names_;
const int launch_flags_;
const FilePath output_file_;
const FilePath flag_file_;
DISALLOW_COPY_AND_ASSIGN(UnitTestProcessLifetimeObserver);
};
class ParallelUnitTestProcessLifetimeObserver
: public UnitTestProcessLifetimeObserver {
public:
ParallelUnitTestProcessLifetimeObserver(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file)
: UnitTestProcessLifetimeObserver(test_launcher,
platform_delegate,
test_names,
launch_flags,
output_file,
flag_file) {}
~ParallelUnitTestProcessLifetimeObserver() override = default;
private:
// ProcessLifetimeObserver:
void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override;
DISALLOW_COPY_AND_ASSIGN(ParallelUnitTestProcessLifetimeObserver);
};
void ParallelUnitTestProcessLifetimeObserver::OnCompleted(
int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ProcessTestResults(test_launcher(), test_names(), output_file(), output,
exit_code, was_timeout);
// The temporary file's directory is also temporary.
DeleteFile(output_file().DirName(), true);
if (!flag_file().empty())
DeleteFile(flag_file(), false);
}
class SerialUnitTestProcessLifetimeObserver
: public UnitTestProcessLifetimeObserver {
public:
SerialUnitTestProcessLifetimeObserver(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file,
std::vector<std::string>&& next_test_names)
: UnitTestProcessLifetimeObserver(test_launcher,
platform_delegate,
test_names,
launch_flags,
output_file,
flag_file),
next_test_names_(std::move(next_test_names)) {}
~SerialUnitTestProcessLifetimeObserver() override = default;
private:
// ProcessLifetimeObserver:
void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override;
std::vector<std::string> next_test_names_;
DISALLOW_COPY_AND_ASSIGN(SerialUnitTestProcessLifetimeObserver);
};
void SerialUnitTestProcessLifetimeObserver::OnCompleted(
int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ProcessTestResults(test_launcher(), test_names(), output_file(), output,
exit_code, was_timeout);
// The temporary file's directory is also temporary.
DeleteFile(output_file().DirName(), true);
if (!flag_file().empty())
DeleteFile(flag_file(), false);
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&RunUnitTestsSerially, test_launcher(), platform_delegate(),
std::move(next_test_names_), launch_flags()));
return final_results;
}
} // namespace
......@@ -534,77 +394,6 @@ int LaunchUnitTests(int argc,
}
#endif // defined(OS_WIN)
void RunUnitTestsSerially(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags) {
if (test_names.empty())
return;
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
FilePath output_file;
CHECK(platform_delegate->CreateResultsFile(&output_file));
FilePath flag_file;
platform_delegate->CreateTemporaryFile(&flag_file);
auto observer = std::make_unique<SerialUnitTestProcessLifetimeObserver>(
test_launcher, platform_delegate,
std::vector<std::string>(1, test_names.back()), launch_flags, output_file,
flag_file,
std::vector<std::string>(test_names.begin(), test_names.end() - 1));
CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
observer->test_names(), output_file, flag_file));
TestLauncher::LaunchOptions launch_options;
launch_options.flags = launch_flags;
test_launcher->LaunchChildGTestProcess(
cmd_line, platform_delegate->GetWrapperForChildGTestProcess(),
TestTimeouts::test_launcher_timeout(), launch_options,
std::move(observer));
}
void RunUnitTestsBatch(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags) {
if (test_names.empty())
return;
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
FilePath output_file;
CHECK(platform_delegate->CreateResultsFile(&output_file));
FilePath flag_file;
platform_delegate->CreateTemporaryFile(&flag_file);
auto observer = std::make_unique<ParallelUnitTestProcessLifetimeObserver>(
test_launcher, platform_delegate, test_names, launch_flags, output_file,
flag_file);
CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
test_names, output_file, flag_file));
// Adjust the timeout depending on how many tests we're running
// (note that e.g. the last batch of tests will be smaller).
// TODO(phajdan.jr): Consider an adaptive timeout, which can change
// depending on how many tests ran and how many remain.
// Note: do NOT parse child's stdout to do that, it's known to be
// unreliable (e.g. buffering issues can mix up the output).
TimeDelta timeout = test_names.size() * TestTimeouts::test_launcher_timeout();
TestLauncher::LaunchOptions options;
options.flags = launch_flags;
test_launcher->LaunchChildGTestProcess(
cmd_line, platform_delegate->GetWrapperForChildGTestProcess(), timeout,
options, std::move(observer));
}
DefaultUnitTestPlatformDelegate::DefaultUnitTestPlatformDelegate() = default;
bool DefaultUnitTestPlatformDelegate::GetTests(
......@@ -613,18 +402,21 @@ bool DefaultUnitTestPlatformDelegate::GetTests(
return true;
}
bool DefaultUnitTestPlatformDelegate::CreateResultsFile(base::FilePath* path) {
if (!CreateNewTempDirectory(FilePath::StringType(), path))
bool DefaultUnitTestPlatformDelegate::CreateResultsFile(
const base::FilePath& temp_dir,
base::FilePath* path) {
if (!CreateTemporaryDirInDir(temp_dir, FilePath::StringType(), path))
return false;
*path = path->AppendASCII("test_results.xml");
return true;
}
bool DefaultUnitTestPlatformDelegate::CreateTemporaryFile(
const base::FilePath& temp_dir,
base::FilePath* path) {
if (!temp_dir_.IsValid() && !temp_dir_.CreateUniqueTempDir())
if (temp_dir.empty())
return false;
return CreateTemporaryFileInDir(temp_dir_.GetPath(), path);
return CreateTemporaryFileInDir(temp_dir, path);
}
CommandLine DefaultUnitTestPlatformDelegate::GetCommandLineForChildGTestProcess(
......@@ -678,38 +470,48 @@ bool UnitTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
return true;
}
size_t UnitTestLauncherDelegate::RunTests(
TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
DCHECK(thread_checker_.CalledOnValidThread());
std::vector<TestResult> UnitTestLauncherDelegate::ProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) {
return UnitTestProcessTestResults(test_names, output_file, output, exit_code,
was_timeout);
}
int launch_flags = use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0;
CommandLine UnitTestLauncherDelegate::GetCommandLine(
const std::vector<std::string>& test_names,
const FilePath& temp_dir,
FilePath* output_file) {
CHECK(!test_names.empty());
std::vector<std::string> batch;
for (const auto& i : test_names) {
batch.push_back(i);
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
CHECK(platform_delegate_->CreateResultsFile(temp_dir, output_file));
FilePath flag_file;
platform_delegate_->CreateTemporaryFile(temp_dir, &flag_file);
// Use 0 to indicate unlimited batch size.
if (batch.size() >= batch_limit_ && batch_limit_ != 0) {
RunUnitTestsBatch(test_launcher, platform_delegate_, batch, launch_flags);
batch.clear();
}
}
return CommandLine(platform_delegate_->GetCommandLineForChildGTestProcess(
test_names, *output_file, flag_file));
}
RunUnitTestsBatch(test_launcher, platform_delegate_, batch, launch_flags);
std::string UnitTestLauncherDelegate::GetWrapper() {
return platform_delegate_->GetWrapperForChildGTestProcess();
}
int UnitTestLauncherDelegate::GetLaunchOptions() {
return use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0;
}
return test_names.size();
TimeDelta UnitTestLauncherDelegate::GetTimeout() {
return TestTimeouts::test_launcher_timeout();
}
size_t UnitTestLauncherDelegate::RetryTests(
TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&RunUnitTestsSerially, test_launcher, platform_delegate_,
test_names,
use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0));
return test_names.size();
size_t UnitTestLauncherDelegate::GetBatchSize() {
return batch_limit_;
}
} // namespace base
......@@ -60,11 +60,13 @@ class UnitTestPlatformDelegate {
// Called to create a temporary for storing test results. The delegate
// must put the resulting path in |path| and return true on success.
virtual bool CreateResultsFile(base::FilePath* path) = 0;
virtual bool CreateResultsFile(const base::FilePath& temp_dir,
base::FilePath* path) = 0;
// Called to create a new temporary file. The delegate must put the resulting
// path in |path| and return true on success.
virtual bool CreateTemporaryFile(base::FilePath* path) = 0;
virtual bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) = 0;
// Returns command line for child GTest process based on the command line
// of current process. |test_names| is a vector of test full names
......@@ -95,9 +97,11 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate {
bool GetTests(std::vector<TestIdentifier>* output) override;
bool CreateResultsFile(base::FilePath* path) override;
bool CreateResultsFile(const base::FilePath& temp_dir,
base::FilePath* path) override;
bool CreateTemporaryFile(base::FilePath* path) override;
bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) override;
CommandLine GetCommandLineForChildGTestProcess(
const std::vector<std::string>& test_names,
......@@ -111,18 +115,6 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate {
DISALLOW_COPY_AND_ASSIGN(DefaultUnitTestPlatformDelegate);
};
// Runs tests serially, each in its own process.
void RunUnitTestsSerially(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags);
// Runs tests in batches (each batch in its own process).
void RunUnitTestsBatch(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags);
// Test launcher delegate for unit tests (mostly to support batching).
class UnitTestLauncherDelegate : public TestLauncherDelegate {
public:
......@@ -136,10 +128,26 @@ class UnitTestLauncherDelegate : public TestLauncherDelegate {
bool GetTests(std::vector<TestIdentifier>* output) override;
bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) override;
size_t RunTests(TestLauncher* test_launcher,
const std::vector<std::string>& test_names) override;
size_t RetryTests(TestLauncher* test_launcher,
const std::vector<std::string>& test_names) override;
std::vector<TestResult> ProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) override;
CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir,
FilePath* output_file) override;
std::string GetWrapper() override;
int GetLaunchOptions() override;
TimeDelta GetTimeout() override;
size_t GetBatchSize() override;
ThreadChecker thread_checker_;
......
......@@ -19,64 +19,6 @@
namespace base {
namespace {
// Unit tests to validate DefaultUnitTestPlatformDelegate implementations.
class DefaultUnitTestPlatformDelegateTester : public testing::Test {
protected:
UnitTestPlatformDelegate* platformDelegate;
FilePath flag_path;
FilePath output_path;
std::vector<std::string> test_names;
void SetUp() override { platformDelegate = &defaultPlatform_; }
private:
DefaultUnitTestPlatformDelegate defaultPlatform_;
};
// Call fails when flag_file does not exist.
TEST_F(DefaultUnitTestPlatformDelegateTester, FlagPathCheckFail) {
ASSERT_CHECK_DEATH(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
}
// Validate flags are set correctly in by the delegate.
TEST_F(DefaultUnitTestPlatformDelegateTester,
GetCommandLineForChildGTestProcess) {
ASSERT_TRUE(platformDelegate->CreateResultsFile(&output_path));
ASSERT_TRUE(platformDelegate->CreateTemporaryFile(&flag_path));
CommandLine cmd_line(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
EXPECT_EQ(cmd_line.GetSwitchValueASCII("test-launcher-output"),
output_path.MaybeAsASCII());
EXPECT_EQ(cmd_line.GetSwitchValueASCII("gtest_flagfile"),
flag_path.MaybeAsASCII());
EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
}
// Validate the tests are saved correctly in flag file under
// the "--gtest_filter" flag.
TEST_F(DefaultUnitTestPlatformDelegateTester, GetCommandLineFilterTest) {
test_names.push_back("Test1");
test_names.push_back("Test2");
ASSERT_TRUE(platformDelegate->CreateResultsFile(&output_path));
ASSERT_TRUE(platformDelegate->CreateTemporaryFile(&flag_path));
CommandLine cmd_line(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
const int size = 2048;
std::string content;
ASSERT_TRUE(ReadFileToStringWithMaxSize(flag_path, &content, size));
EXPECT_EQ(content.find("--gtest_filter="), 0u);
base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
std::vector<std::string> gtest_filter_tests =
SplitString(content, ":", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
for (unsigned i = 0; i < test_names.size(); i++) {
EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
}
}
// Mock TestLauncher to validate LaunchChildGTestProcess
// is called correctly inside the test launcher delegate.
class MockTestLauncher : public TestLauncher {
......@@ -85,81 +27,52 @@ class MockTestLauncher : public TestLauncher {
size_t parallel_jobs)
: TestLauncher(launcher_delegate, parallel_jobs) {}
MOCK_METHOD5(LaunchChildGTestProcess,
void(const CommandLine& command_line,
const std::string& wrapper,
TimeDelta timeout,
const LaunchOptions& options,
std::unique_ptr<ProcessLifetimeObserver> observer));
MOCK_METHOD3(LaunchChildGTestProcess,
void(scoped_refptr<TaskRunner> task_runner,
const std::vector<std::string>& test_names,
const FilePath& temp_dir));
};
// Unit tests to validate UnitTestLauncherDelegateTester implementations.
class UnitTestLauncherDelegateTester : public testing::Test {
protected:
TestLauncherDelegate* launcherDelegate;
MockTestLauncher* launcher;
std::vector<std::string> tests;
void SetUp() override { tests.assign(100, "Test"); }
// Setup test launcher delegate with a particular batch size.
void SetUpLauncherDelegate(size_t batch_size) {
launcherDelegate =
new UnitTestLauncherDelegate(&defaultPlatform, batch_size, true);
launcher = new MockTestLauncher(launcherDelegate, batch_size);
}
// Validate LaunchChildGTestProcess is called x number of times.
void ValidateChildGTestProcessCalls(int times_called) {
using ::testing::_;
EXPECT_CALL(*launcher, LaunchChildGTestProcess(_, _, _, _, _))
.Times(times_called);
}
void TearDown() override {
delete launcherDelegate;
delete launcher;
}
private:
DefaultUnitTestPlatformDelegate defaultPlatform;
};
// Validate 0 batch size corresponds to unlimited batch size.
TEST_F(UnitTestLauncherDelegateTester, RunTestsWithUnlimitedBatchSize) {
SetUpLauncherDelegate(0);
ValidateChildGTestProcessCalls(1);
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size());
}
// Validate edge case, no tests to run.
TEST_F(UnitTestLauncherDelegateTester, RunTestsWithEmptyTests) {
SetUpLauncherDelegate(0);
// Validate delegate produces correct command line.
TEST_F(UnitTestLauncherDelegateTester, GetCommandLine) {
UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 10u, true);
TestLauncherDelegate* delegate_ptr = &launcher_delegate;
ValidateChildGTestProcessCalls(0);
tests.clear();
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size());
}
std::vector<std::string> test_names(5, "Tests");
base::FilePath temp_dir;
base::FilePath result_file;
CreateNewTempDirectory(FilePath::StringType(), &temp_dir);
// Validate delegate slices batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, RunTestsBatchSize10) {
SetUpLauncherDelegate(10);
CommandLine cmd_line =
delegate_ptr->GetCommandLine(test_names, temp_dir, &result_file);
EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
EXPECT_EQ(cmd_line.GetSwitchValuePath("test-launcher-output"), result_file);
ValidateChildGTestProcessCalls(10);
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size());
const int size = 2048;
std::string content;
ASSERT_TRUE(ReadFileToStringWithMaxSize(
cmd_line.GetSwitchValuePath("gtest_flagfile"), &content, size));
EXPECT_EQ(content.find("--gtest_filter="), 0u);
base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
std::vector<std::string> gtest_filter_tests =
SplitString(content, ":", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
for (unsigned i = 0; i < test_names.size(); i++) {
EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
}
}
// ValidateRetryTests will only kick-off one run.
TEST_F(UnitTestLauncherDelegateTester, RetryTests) {
// ScopedTaskEviorment is needed since RetryTests uses thread task
// runner to start.
test::ScopedTaskEnvironment task_environment;
SetUpLauncherDelegate(10);
ValidateChildGTestProcessCalls(1);
EXPECT_EQ(launcherDelegate->RetryTests(launcher, tests), tests.size());
RunLoop().RunUntilIdle();
// Validate delegate sets batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, BatchSize) {
UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 15u, true);
TestLauncherDelegate* delegate_ptr = &launcher_delegate;
EXPECT_EQ(delegate_ptr->GetBatchSize(), 15u);
}
} // namespace
......
......@@ -82,13 +82,6 @@ TestLauncherDelegate* g_launcher_delegate = nullptr;
ContentMainParams* g_params = nullptr;
#endif
std::string RemoveAnyPrePrefixes(const std::string& test_name) {
std::string result(test_name);
base::ReplaceSubstringsAfterOffset(
&result, 0, kPreTestPrefix, base::StringPiece());
return result;
}
void PrintUsage() {
fprintf(stdout,
"Runs tests using the gtest framework, each batch of tests being\n"
......@@ -135,70 +128,28 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
public:
explicit WrapperTestLauncherDelegate(
content::TestLauncherDelegate* launcher_delegate)
: launcher_delegate_(launcher_delegate) {
CHECK(temp_dir_.CreateUniqueTempDir());
}
: launcher_delegate_(launcher_delegate) {}
// base::TestLauncherDelegate:
bool GetTests(std::vector<base::TestIdentifier>* output) override;
bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) override;
size_t RunTests(base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) override;
size_t RetryTests(base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) override;
private:
class ChildProcessLifetimeObserver : public base::ProcessLifetimeObserver {
public:
ChildProcessLifetimeObserver(
WrapperTestLauncherDelegate* test_launcher_delegate,
base::TestLauncher* test_launcher,
std::vector<std::string>&& next_test_names,
const std::string& test_name,
const base::FilePath& output_file)
: base::ProcessLifetimeObserver(),
test_launcher_delegate_(test_launcher_delegate),
test_launcher_(test_launcher),
next_test_names_(std::move(next_test_names)),
test_name_(test_name),
output_file_(output_file) {}
~ChildProcessLifetimeObserver() override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
private:
void OnTimedOut(const base::CommandLine& command_line) override {
test_launcher_delegate_->OnTestTimedOut(command_line);
}
base::CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const base::FilePath& temp_dir,
base::FilePath* output_file) override;
void OnCompleted(int exit_code,
base::TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
test_launcher_delegate_->GTestCallback(
test_launcher_, next_test_names_, test_name_, output_file_, exit_code,
elapsed_time, was_timeout, output);
}
size_t GetBatchSize() override;
SEQUENCE_CHECKER(sequence_checker_);
WrapperTestLauncherDelegate* test_launcher_delegate_;
base::TestLauncher* test_launcher_;
std::vector<std::string> next_test_names_;
std::string test_name_;
base::FilePath output_file_;
std::string GetWrapper() override;
DISALLOW_COPY_AND_ASSIGN(ChildProcessLifetimeObserver);
};
int GetLaunchOptions() override;
void DoRunTests(base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names);
base::TimeDelta GetTimeout() override;
private:
// Relays timeout notification from the TestLauncher (by way of a
// ProcessLifetimeObserver) to the caller's content::TestLauncherDelegate.
void OnTestTimedOut(const base::CommandLine& command_line);
void OnTestTimedOut(const base::CommandLine& command_line) override;
// Callback to receive result of a test.
// |output_file| is a path to xml file written by test-launcher
......@@ -206,28 +157,17 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
// EXPECT/ASSERT/DCHECK statements. Test launcher parses that
// file to get additional information about test run (status,
// error-messages, stack-traces and file/line for failures).
void GTestCallback(base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names,
const std::string& test_name,
const base::FilePath& output_file,
int exit_code,
const base::TimeDelta& elapsed_time,
bool was_timeout,
const std::string& output);
std::vector<base::TestResult> ProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) override;
content::TestLauncherDelegate* launcher_delegate_;
// Store unique data directory prefix for test names (without PRE_ prefixes).
// PRE_ tests and tests that depend on them must share the same
// data directory. Using test name as directory name leads to too long
// names (exceeding UNIX_PATH_MAX, which creates a problem with
// process_singleton_linux). Create a randomly-named temporary directory
// and keep track of the names so that PRE_ tests can still re-use them.
typedef std::map<std::string, base::FilePath> UserDataDirMap;
UserDataDirMap user_data_dir_map_;
// Temporary directory for user data directories.
base::ScopedTempDir temp_dir_;
DISALLOW_COPY_AND_ASSIGN(WrapperTestLauncherDelegate);
};
......@@ -253,81 +193,26 @@ bool WrapperTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
return true;
}
size_t WrapperTestLauncherDelegate::RunTests(
base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
user_data_dir_map_.clear();
std::vector<std::string> test_list;
for (const std::string& test_name : test_names) {
// Stack all dependent tests and run them sequentially.
test_list.push_back(test_name);
if (!IsPreTestName(test_name)) {
if (!base::Contains(user_data_dir_map_, test_name)) {
base::FilePath temp_dir;
CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
FILE_PATH_LITERAL("d"), &temp_dir));
user_data_dir_map_[test_name] = temp_dir;
}
DoRunTests(test_launcher, test_list);
test_list.clear();
}
}
return test_names.size();
}
size_t WrapperTestLauncherDelegate::RetryTests(
base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
// Discard user data directories from any previous runs. Start with
// fresh state.
for (const auto& it : user_data_dir_map_) {
// Delete temporary directories now to avoid using too much space in /tmp.
if (!base::DeleteFile(it.second, true)) {
LOG(WARNING) << "Failed to delete " << it.second.value();
}
}
user_data_dir_map_.clear();
for (const std::string& full_name : test_names) {
// Make sure PRE_ tests and tests that depend on them share the same
// data directory - based it on the test name without prefixes.
std::string test_name_no_pre(RemoveAnyPrePrefixes(full_name));
if (!base::Contains(user_data_dir_map_, test_name_no_pre)) {
base::FilePath temp_dir;
CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
FILE_PATH_LITERAL("d"), &temp_dir));
user_data_dir_map_[test_name_no_pre] = temp_dir;
}
}
DoRunTests(test_launcher, test_names);
return test_names.size();
size_t WrapperTestLauncherDelegate::GetBatchSize() {
return 1u;
}
void WrapperTestLauncherDelegate::DoRunTests(
base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
if (test_names.empty())
return;
base::CommandLine WrapperTestLauncherDelegate::GetCommandLine(
const std::vector<std::string>& test_names,
const base::FilePath& temp_dir,
base::FilePath* output_file) {
DCHECK_EQ(1u, test_names.size());
std::string test_name(test_names.front());
std::vector<std::string> test_names_copy(
test_names.begin() + 1, test_names.end());
std::string test_name_no_pre(RemoveAnyPrePrefixes(test_name));
// Chained pre tests must share the same temp directory,
// TestLauncher should guarantee that for the delegate.
base::FilePath user_data_dir = temp_dir.AppendASCII("user_data");
CreateDirectory(user_data_dir);
base::CommandLine cmd_line(*base::CommandLine::ForCurrentProcess());
base::TestLauncher::LaunchOptions test_launch_options;
test_launch_options.flags = base::TestLauncher::USE_JOB_OBJECTS |
base::TestLauncher::ALLOW_BREAKAWAY_FROM_JOB;
launcher_delegate_->PreRunTest();
CHECK(launcher_delegate_->AdjustChildProcessCommandLine(
&cmd_line, user_data_dir_map_[test_name_no_pre]));
CHECK(launcher_delegate_->AdjustChildProcessCommandLine(&cmd_line,
user_data_dir));
base::CommandLine new_cmd_line(cmd_line.GetProgram());
base::CommandLine::SwitchMap switches = cmd_line.GetSwitches();
// Strip out gtest_output flag because otherwise we would overwrite results
// of the other tests.
switches.erase(base::kGTestOutputFlag);
......@@ -335,12 +220,11 @@ void WrapperTestLauncherDelegate::DoRunTests(
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
base::FilePath output_file;
CHECK(base::CreateTemporaryDirInDir(
temp_dir_.GetPath(), FILE_PATH_LITERAL("results"), &output_file));
output_file = output_file.AppendASCII("test_results.xml");
CHECK(base::CreateTemporaryDirInDir(temp_dir, FILE_PATH_LITERAL("results"),
output_file));
*output_file = output_file->AppendASCII("test_results.xml");
new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, output_file);
new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, *output_file);
for (base::CommandLine::SwitchMap::const_iterator iter = switches.begin();
iter != switches.end(); ++iter) {
......@@ -352,18 +236,21 @@ void WrapperTestLauncherDelegate::DoRunTests(
new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests");
new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);
return new_cmd_line;
}
// Returns the value of the BROWSER_WRAPPER environment variable, or an
// empty string when the variable is not set.
std::string WrapperTestLauncherDelegate::GetWrapper() {
  const char* wrapper_env = getenv("BROWSER_WRAPPER");
  if (!wrapper_env)
    return std::string();
  return std::string(wrapper_env);
}
auto observer = std::make_unique<ChildProcessLifetimeObserver>(
this, test_launcher, std::move(test_names_copy), test_name, output_file);
// Flags passed to base::TestLauncher when spawning child test processes:
// run children inside job objects, but permit breakaway from the job.
int WrapperTestLauncherDelegate::GetLaunchOptions() {
  int flags = base::TestLauncher::USE_JOB_OBJECTS;
  flags |= base::TestLauncher::ALLOW_BREAKAWAY_FROM_JOB;
  return flags;
}
// Must use test_launcher_timeout() here because this process is allowed to
// assume that it can use an entire action_max_timeout() in its lifespan.
test_launcher->LaunchChildGTestProcess(
new_cmd_line, browser_wrapper ? browser_wrapper : std::string(),
TestTimeouts::test_launcher_timeout(), test_launch_options,
std::move(observer));
// Timeout for a single child test process. Must use test_launcher_timeout()
// here because the child process is allowed to assume that it can use an
// entire action_max_timeout() in its lifespan.
base::TimeDelta WrapperTestLauncherDelegate::GetTimeout() {
  base::TimeDelta per_test_timeout = TestTimeouts::test_launcher_timeout();
  return per_test_timeout;
}
void WrapperTestLauncherDelegate::OnTestTimedOut(
......@@ -371,16 +258,16 @@ void WrapperTestLauncherDelegate::OnTestTimedOut(
launcher_delegate_->OnTestTimedOut(command_line);
}
void WrapperTestLauncherDelegate::GTestCallback(
base::TestLauncher* test_launcher,
std::vector<base::TestResult> WrapperTestLauncherDelegate::ProcessTestResults(
const std::vector<std::string>& test_names,
const std::string& test_name,
const base::FilePath& output_file,
int exit_code,
const std::string& output,
const base::TimeDelta& elapsed_time,
bool was_timeout,
const std::string& output) {
int exit_code,
bool was_timeout) {
base::TestResult result;
DCHECK_EQ(1u, test_names.size());
std::string test_name = test_names.front();
result.full_name = test_name;
bool crashed = false;
......@@ -430,19 +317,7 @@ void WrapperTestLauncherDelegate::GTestCallback(
launcher_delegate_->PostRunTest(&result);
// No other tests depend on this, we can delete the temporary directory now.
// Do so to avoid too many temporary files using lots of disk space.
if (base::Contains(user_data_dir_map_, test_name)) {
if (!base::DeleteFile(user_data_dir_map_[test_name], true)) {
LOG(WARNING) << "Failed to delete "
<< user_data_dir_map_[test_name].value();
}
user_data_dir_map_.erase(test_name);
}
test_launcher->OnTestFinished(result);
DoRunTests(test_launcher, test_names);
return std::vector<base::TestResult>({result});
}
} // namespace
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment