Commit bb2dfc38 authored by Ilia Samsonov's avatar Ilia Samsonov Committed by Commit Bot

Removed ProcessLifetimeObserver from TestLauncher.

The goal of this CL is to move all child process logic to TestLauncher.
This should simplify the gtest launcher structure and clarify
each class's responsibilities.

TestRunner controls running test processes across sequence runners.

TestLauncherDelegate is now limited to provide test specific needs.
Command line for tests, timeout, result processing, etc.

This allows us to remove the ProcessLifetimeObserver and its extending
classes.

The original change has been reverted since result processing
was not thread safe.

Bug: 936248,848465
Change-Id: I2468cf2e92901c7f2c4a6f42838be219397a7b0a
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1721429
Commit-Queue: Ilia Samsonov <isamsonov@google.com>
Reviewed-by: default avatarScott Violet <sky@chromium.org>
Reviewed-by: default avatarErik Chen <erikchen@chromium.org>
Cr-Commit-Position: refs/heads/master@{#682916}
parent 3e858960
...@@ -268,7 +268,7 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line, ...@@ -268,7 +268,7 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
const LaunchOptions& options, const LaunchOptions& options,
int flags, int flags,
TimeDelta timeout, TimeDelta timeout,
ProcessLifetimeObserver* observer, TestLauncherDelegate* delegate,
bool* was_timeout) { bool* was_timeout) {
TimeTicks start_time(TimeTicks::Now()); TimeTicks start_time(TimeTicks::Now());
...@@ -410,8 +410,8 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line, ...@@ -410,8 +410,8 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
} }
if (!did_exit) { if (!did_exit) {
if (observer) if (delegate)
observer->OnTimedOut(command_line); delegate->OnTestTimedOut(command_line);
*was_timeout = true; *was_timeout = true;
exit_code = -1; // Set a non-zero exit code to signal a failure. exit_code = -1; // Set a non-zero exit code to signal a failure.
...@@ -454,15 +454,29 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line, ...@@ -454,15 +454,29 @@ int LaunchChildTestProcessWithOptions(const CommandLine& command_line,
return exit_code; return exit_code;
} }
void DoLaunchChildTestProcess( struct ChildProcessResults {
// Total time for DoLaunchChildTest Process to execute.
TimeDelta elapsed_time;
// If stdio is redirected, pass output file content.
std::string output_file_contents;
// True if child process timed out.
bool was_timeout = false;
// Exit code of child process.
int exit_code;
};
// This launches the child test process, waits for it to complete,
// and returns child process results.
ChildProcessResults DoLaunchChildTestProcess(
const CommandLine& command_line, const CommandLine& command_line,
TimeDelta timeout, TimeDelta timeout,
const TestLauncher::LaunchOptions& test_launch_options, const TestLauncher::LaunchOptions& test_launch_options,
bool redirect_stdio, bool redirect_stdio,
SingleThreadTaskRunner* task_runner, TestLauncherDelegate* delegate) {
std::unique_ptr<ProcessLifetimeObserver> observer) {
TimeTicks start_time = TimeTicks::Now(); TimeTicks start_time = TimeTicks::Now();
ChildProcessResults result;
ScopedFILE output_file; ScopedFILE output_file;
FilePath output_filename; FilePath output_filename;
if (redirect_stdio) { if (redirect_stdio) {
...@@ -512,34 +526,26 @@ void DoLaunchChildTestProcess( ...@@ -512,34 +526,26 @@ void DoLaunchChildTestProcess(
#endif // !defined(OS_WIN) #endif // !defined(OS_WIN)
bool was_timeout = false; result.exit_code = LaunchChildTestProcessWithOptions(
int exit_code = LaunchChildTestProcessWithOptions( command_line, options, test_launch_options.flags, timeout, delegate,
command_line, options, test_launch_options.flags, timeout, observer.get(), &result.was_timeout);
&was_timeout);
std::string output_file_contents;
if (redirect_stdio) { if (redirect_stdio) {
fflush(output_file.get()); fflush(output_file.get());
output_file.reset(); output_file.reset();
// Reading the file can sometimes fail when the process was killed midflight // Reading the file can sometimes fail when the process was killed midflight
// (e.g. on test suite timeout): https://crbug.com/826408. Attempt to read // (e.g. on test suite timeout): https://crbug.com/826408. Attempt to read
// the output file anyways, but do not crash on failure in this case. // the output file anyways, but do not crash on failure in this case.
CHECK(ReadFileToString(output_filename, &output_file_contents) || CHECK(ReadFileToString(output_filename, &result.output_file_contents) ||
exit_code != 0); result.exit_code != 0);
if (!DeleteFile(output_filename, false)) { if (!DeleteFile(output_filename, false)) {
// This needs to be non-fatal at least for Windows. // This needs to be non-fatal at least for Windows.
LOG(WARNING) << "Failed to delete " << output_filename.AsUTF8Unsafe(); LOG(WARNING) << "Failed to delete " << output_filename.AsUTF8Unsafe();
} }
} }
result.elapsed_time = TimeTicks::Now() - start_time;
// Invoke OnCompleted on the thread it was originating from, not on a worker return result;
// pool thread.
task_runner->PostTask(
FROM_HERE,
BindOnce(&ProcessLifetimeObserver::OnCompleted, std::move(observer),
exit_code, TimeTicks::Now() - start_time, was_timeout,
output_file_contents));
} }
std::vector<std::string> ExtractTestsFromFilter(const std::string& filter, std::vector<std::string> ExtractTestsFromFilter(const std::string& filter,
...@@ -556,6 +562,122 @@ std::vector<std::string> ExtractTestsFromFilter(const std::string& filter, ...@@ -556,6 +562,122 @@ std::vector<std::string> ExtractTestsFromFilter(const std::string& filter,
return tests; return tests;
} }
// A test runner object to run tests across a number of sequence runners,
// and control running pre tests in sequence.
class TestRunner {
public:
explicit TestRunner(TestLauncher* launcher,
size_t runner_count = 1u,
size_t batch_size = 1u)
: launcher_(launcher),
runner_count_(runner_count),
batch_size_(batch_size),
weak_ptr_factory_(this) {}
// Sets |test_names| to be run, with |batch_size| tests per process.
// Posts LaunchNextTask |runner_count| number of times, each with a separate
// task runner.
void Run(const std::vector<std::string>& test_names);
private:
// Called to check if the next batch has to run on the same
// sequence task runner and using the same temporary directory.
bool ShouldReuseStateFromLastBatch(
const std::vector<std::string>& test_names) {
return test_names.size() == 1u &&
test_names.front().find(kPreTestPrefix) != std::string::npos;
}
// Launch next child process on |task_runner|,
// and clear |temp_dir| from previous process.
void LaunchNextTask(scoped_refptr<TaskRunner> task_runner, FilePath temp_dir);
// Forward |temp_dir| and launch the next task on the main thread.
// The method is called on |task_runner|.
void ClearAndLaunchNext(scoped_refptr<TaskRunner> main_thread_runner,
scoped_refptr<TaskRunner> task_runner,
const FilePath& temp_dir) {
main_thread_runner->PostTask(
FROM_HERE,
BindOnce(&TestRunner::LaunchNextTask, weak_ptr_factory_.GetWeakPtr(),
task_runner, temp_dir));
}
ThreadChecker thread_checker_;
std::vector<std::string> tests_to_run_;
TestLauncher* const launcher_;
std::vector<scoped_refptr<TaskRunner>> task_runners_;
// Number of sequenced task runners to use.
const size_t runner_count_;
// Number of TaskRunners that have finished.
size_t runners_done_ = 0;
// Number of tests per process, 0 is special case for all tests.
const size_t batch_size_;
RunLoop run_loop_;
base::WeakPtrFactory<TestRunner> weak_ptr_factory_;
};
void TestRunner::Run(const std::vector<std::string>& test_names) {
DCHECK(thread_checker_.CalledOnValidThread());
// No sequence runners, fail immediately.
CHECK_GT(runner_count_, 0u);
tests_to_run_ = test_names;
// Reverse test order to avoid copying the whole vector when removing tests.
std::reverse(tests_to_run_.begin(), tests_to_run_.end());
runners_done_ = 0;
task_runners_.clear();
for (size_t i = 0; i < runner_count_; i++) {
task_runners_.push_back(CreateSequencedTaskRunnerWithTraits(
{MayBlock(), TaskShutdownBehavior::BLOCK_SHUTDOWN}));
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&TestRunner::LaunchNextTask, weak_ptr_factory_.GetWeakPtr(),
task_runners_.back(), FilePath()));
}
run_loop_.Run();
}
void TestRunner::LaunchNextTask(scoped_refptr<TaskRunner> task_runner,
FilePath temp_dir) {
DCHECK(thread_checker_.CalledOnValidThread());
// Delete the previous temporary directory.
if (!temp_dir.empty() && !DeleteFile(temp_dir, false)) {
// This needs to be non-fatal at least for Windows.
LOG(WARNING) << "Failed to delete " << temp_dir.AsUTF8Unsafe();
}
// No more tests to run, finish sequence.
if (tests_to_run_.empty()) {
runners_done_++;
// All sequence runners are done, quit the loop.
if (runners_done_ == runner_count_)
run_loop_.QuitWhenIdle();
return;
}
CreateNewTempDirectory(FilePath::StringType(), &temp_dir);
bool post_to_current_runner = true;
size_t batch_size = (batch_size_ == 0) ? tests_to_run_.size() : batch_size_;
while (post_to_current_runner && !tests_to_run_.empty()) {
batch_size = std::min(batch_size, tests_to_run_.size());
std::vector<std::string> batch(tests_to_run_.rbegin(),
tests_to_run_.rbegin() + batch_size);
tests_to_run_.erase(tests_to_run_.end() - batch_size, tests_to_run_.end());
task_runner->PostTask(
FROM_HERE,
BindOnce(&TestLauncher::LaunchChildGTestProcess, Unretained(launcher_),
ThreadTaskRunnerHandle::Get(), batch, temp_dir));
post_to_current_runner = ShouldReuseStateFromLastBatch(batch);
}
task_runner->PostTask(
FROM_HERE,
BindOnce(&TestRunner::ClearAndLaunchNext, Unretained(this),
ThreadTaskRunnerHandle::Get(), task_runner, temp_dir));
}
} // namespace } // namespace
const char kGTestBreakOnFailure[] = "gtest_break_on_failure"; const char kGTestBreakOnFailure[] = "gtest_break_on_failure";
...@@ -740,23 +862,44 @@ bool TestLauncher::Run(CommandLine* command_line) { ...@@ -740,23 +862,44 @@ bool TestLauncher::Run(CommandLine* command_line) {
} }
void TestLauncher::LaunchChildGTestProcess( void TestLauncher::LaunchChildGTestProcess(
const CommandLine& command_line, scoped_refptr<TaskRunner> task_runner,
const std::string& wrapper, const std::vector<std::string>& test_names,
TimeDelta timeout, const FilePath& temp_dir) {
const LaunchOptions& options, FilePath result_file;
std::unique_ptr<ProcessLifetimeObserver> observer) { CommandLine cmd_line =
DCHECK(thread_checker_.CalledOnValidThread()); launcher_delegate_->GetCommandLine(test_names, temp_dir, &result_file);
// Record the exact command line used to launch the child. // Record the exact command line used to launch the child.
CommandLine new_command_line( CommandLine new_command_line(
PrepareCommandLineForGTest(command_line, wrapper)); PrepareCommandLineForGTest(cmd_line, launcher_delegate_->GetWrapper()));
LaunchOptions options;
options.flags = launcher_delegate_->GetLaunchOptions();
ChildProcessResults process_results = DoLaunchChildTestProcess(
new_command_line, launcher_delegate_->GetTimeout() * test_names.size(),
options, redirect_stdio_, launcher_delegate_);
PostTask( // Invoke ProcessTestResults on the original thread, not
// on a worker pool thread.
task_runner->PostTask(
FROM_HERE, FROM_HERE,
{ThreadPool(), MayBlock(), TaskShutdownBehavior::BLOCK_SHUTDOWN}, BindOnce(&TestLauncher::ProcessTestResults, Unretained(this), test_names,
BindOnce(&DoLaunchChildTestProcess, new_command_line, timeout, options, result_file, process_results.output_file_contents,
redirect_stdio_, RetainedRef(ThreadTaskRunnerHandle::Get()), process_results.elapsed_time, process_results.exit_code,
std::move(observer))); process_results.was_timeout));
}
void TestLauncher::ProcessTestResults(
const std::vector<std::string>& test_names,
const base::FilePath& result_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) {
std::vector<TestResult> test_results = launcher_delegate_->ProcessTestResults(
test_names, result_file, output, elapsed_time, exit_code, was_timeout);
for (const auto& result : test_results)
OnTestFinished(result);
} }
void TestLauncher::OnTestFinished(const TestResult& original_result) { void TestLauncher::OnTestFinished(const TestResult& original_result) {
...@@ -859,8 +1002,6 @@ void TestLauncher::OnTestFinished(const TestResult& original_result) { ...@@ -859,8 +1002,6 @@ void TestLauncher::OnTestFinished(const TestResult& original_result) {
exit(1); exit(1);
} }
if (test_finished_count_ == test_started_count_)
RunLoop::QuitCurrentWhenIdleDeprecated();
} }
// Helper used to parse test filter files. Syntax is documented in // Helper used to parse test filter files. Syntax is documented in
...@@ -1385,10 +1526,11 @@ void TestLauncher::RunTests() { ...@@ -1385,10 +1526,11 @@ void TestLauncher::RunTests() {
broken_threshold_ = std::max(static_cast<size_t>(20), test_found_count / 10); broken_threshold_ = std::max(static_cast<size_t>(20), test_found_count / 10);
test_started_count_ = launcher_delegate_->RunTests(this, test_names); test_started_count_ = test_names.size();
if (test_started_count_ > 0) TestRunner test_runner(this, parallel_jobs_,
RunLoop().Run(); launcher_delegate_->GetBatchSize());
test_runner.Run(test_names);
} }
bool TestLauncher::RunRetryTests() { bool TestLauncher::RunRetryTests() {
...@@ -1403,9 +1545,7 @@ bool TestLauncher::RunRetryTests() { ...@@ -1403,9 +1545,7 @@ bool TestLauncher::RunRetryTests() {
} }
tests_to_retry_.clear(); tests_to_retry_.clear();
size_t retry_started_count = size_t retry_started_count = test_names.size();
launcher_delegate_->RetryTests(this, test_names);
test_started_count_ += retry_started_count; test_started_count_ += retry_started_count;
// Only invoke RunLoop if there are any tasks to run. // Only invoke RunLoop if there are any tasks to run.
...@@ -1416,8 +1556,8 @@ bool TestLauncher::RunRetryTests() { ...@@ -1416,8 +1556,8 @@ bool TestLauncher::RunRetryTests() {
retry_started_count > 1 ? "s" : "", retry_count); retry_started_count > 1 ? "s" : "", retry_count);
fflush(stdout); fflush(stdout);
RunLoop().Run(); TestRunner test_runner(this);
test_runner.Run(test_names);
retry_count++; retry_count++;
} }
return tests_to_retry_.empty(); return tests_to_retry_.empty();
......
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "base/command_line.h"
#include "base/compiler_specific.h" #include "base/compiler_specific.h"
#include "base/macros.h" #include "base/macros.h"
#include "base/process/launch.h" #include "base/process/launch.h"
...@@ -26,10 +27,6 @@ ...@@ -26,10 +27,6 @@
namespace base { namespace base {
class CommandLine;
struct LaunchOptions;
class TestLauncher;
// Constants for GTest command-line flags. // Constants for GTest command-line flags.
extern const char kGTestFilterFlag[]; extern const char kGTestFilterFlag[];
extern const char kGTestFlagfileFlag[]; extern const char kGTestFlagfileFlag[];
...@@ -57,53 +54,47 @@ class TestLauncherDelegate { ...@@ -57,53 +54,47 @@ class TestLauncherDelegate {
virtual bool WillRunTest(const std::string& test_case_name, virtual bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) = 0; const std::string& test_name) = 0;
// Called to make the delegate run the specified tests. The delegate must // Invoked after a child process finishes, reporting the process |exit_code|,
// return the number of actual tests it's going to run (can be smaller, // child process |elapsed_time|, whether or not the process was terminated as
// equal to, or larger than size of |test_names|). It must also call // a result of a timeout, and the output of the child (stdout and stderr
// |test_launcher|'s OnTestFinished method once per every run test, // together). NOTE: this method is invoked on the main thread.
// regardless of its success. // Returns test results of child process.
// If test_names contains PRE_ chained tests, they must be properly ordered virtual std::vector<TestResult> ProcessTestResults(
// and consecutive. const std::vector<std::string>& test_names,
virtual size_t RunTests(TestLauncher* test_launcher, const base::FilePath& output_file,
const std::vector<std::string>& test_names) = 0; const std::string& output,
const base::TimeDelta& elapsed_time,
// Called to make the delegate retry the specified tests. The delegate must int exit_code,
// return the number of actual tests it's going to retry (can be smaller, bool was_timeout) = 0;
// equal to, or larger than size of |test_names|). It must also call
// |test_launcher|'s OnTestFinished method once per every retried test, // Called to get the command line for the specified tests.
// regardless of its success. // |output_file_| is populated with the path to the result file, and must
virtual size_t RetryTests(TestLauncher* test_launcher, // be inside |temp_dir|.
const std::vector<std::string>& test_names) = 0; virtual CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir,
virtual ~TestLauncherDelegate(); FilePath* output_file) = 0;
};
// An observer of child process lifetime events generated by
// LaunchChildGTestProcess.
class ProcessLifetimeObserver {
public:
virtual ~ProcessLifetimeObserver() = default;
// Invoked when a test process exceeds its runtime, immediately before it is // Invoked when a test process exceeds its runtime, immediately before it is
// terminated. |command_line| is the command line used to launch the process. // terminated. |command_line| is the command line used to launch the process.
// NOTE: this method is invoked on the thread the process is launched on. // NOTE: this method is invoked on the thread the process is launched on.
virtual void OnTimedOut(const CommandLine& command_line) {} virtual void OnTestTimedOut(const base::CommandLine& cmd_line) {}
// Invoked after a child process finishes, reporting the process |exit_code|, // Returns the delegate specific wrapper for command line.
// child process |elapsed_time|, whether or not the process was terminated as // If it is not empty, it is prepended to the final command line.
// a result of a timeout, and the output of the child (stdout and stderr virtual std::string GetWrapper() = 0;
// together). NOTE: this method is invoked on the same thread as
// LaunchChildGTestProcess.
virtual void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {}
protected: // Returns the delegate specific flags for launch options.
ProcessLifetimeObserver() = default; // The flags are specified in LaunchChildGTestProcessFlags.
virtual int GetLaunchOptions() = 0;
private: // Returns the delegate specific timeout per test.
DISALLOW_COPY_AND_ASSIGN(ProcessLifetimeObserver); virtual TimeDelta GetTimeout() = 0;
// Returns the delegate specific batch size.
virtual size_t GetBatchSize() = 0;
protected:
virtual ~TestLauncherDelegate();
}; };
// Launches tests using a TestLauncherDelegate. // Launches tests using a TestLauncherDelegate.
...@@ -151,18 +142,16 @@ class TestLauncher { ...@@ -151,18 +142,16 @@ class TestLauncher {
// if null, uses command line for current process. // if null, uses command line for current process.
bool Run(CommandLine* command_line = nullptr) WARN_UNUSED_RESULT; bool Run(CommandLine* command_line = nullptr) WARN_UNUSED_RESULT;
// Launches a child process (assumed to be gtest-based binary) using // Launches a child process (assumed to be gtest-based binary) which runs
// |command_line|. If |wrapper| is not empty, it is prepended to the final // tests indicated by |test_names|.
// command line. |observer|, if not null, is used to convey process lifetime // |task_runner| is used to post results back to the launcher
// events to the caller. |observer| is destroyed after its OnCompleted // on the main thread. |temp_dir| is used for child process files,
// method is invoked. // such as user data, result file, and flag_file.
// virtual to mock in testing. // virtual to mock in testing.
virtual void LaunchChildGTestProcess( virtual void LaunchChildGTestProcess(
const CommandLine& command_line, scoped_refptr<TaskRunner> task_runner,
const std::string& wrapper, const std::vector<std::string>& test_names,
TimeDelta timeout, const FilePath& temp_dir);
const LaunchOptions& options,
std::unique_ptr<ProcessLifetimeObserver> observer);
// Called when a test has finished running. // Called when a test has finished running.
void OnTestFinished(const TestResult& result); void OnTestFinished(const TestResult& result);
...@@ -220,6 +209,13 @@ class TestLauncher { ...@@ -220,6 +209,13 @@ class TestLauncher {
// wait for child processes). virtual to mock in testing. // wait for child processes). virtual to mock in testing.
virtual void CreateAndStartThreadPool(int num_parallel_jobs); virtual void CreateAndStartThreadPool(int num_parallel_jobs);
void ProcessTestResults(const std::vector<std::string>& test_names,
const base::FilePath& result_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout);
// Make sure we don't accidentally call the wrong methods e.g. on the worker // Make sure we don't accidentally call the wrong methods e.g. on the worker
// pool thread. Should be the first member so that it's destroyed last: when // pool thread. Should be the first member so that it's destroyed last: when
// destroying other members, especially the worker pool, we may check the code // destroying other members, especially the worker pool, we may check the code
......
...@@ -80,14 +80,19 @@ class NonSfiUnitTestPlatformDelegate : public base::UnitTestPlatformDelegate { ...@@ -80,14 +80,19 @@ class NonSfiUnitTestPlatformDelegate : public base::UnitTestPlatformDelegate {
} }
private: private:
bool CreateResultsFile(base::FilePath* path) override { bool CreateResultsFile(const base::FilePath& temp_dir,
if (!base::CreateNewTempDirectory(base::FilePath::StringType(), path)) base::FilePath* path) override {
if (!base::CreateTemporaryDirInDir(temp_dir, base::FilePath::StringType(),
path))
return false; return false;
*path = path->AppendASCII("test_results.xml"); *path = path->AppendASCII("test_results.xml");
return true; return true;
} }
bool CreateTemporaryFile(base::FilePath* path) override { return false; } bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) override {
return false;
}
bool GetTests(std::vector<base::TestIdentifier>* output) override { bool GetTests(std::vector<base::TestIdentifier>* output) override {
base::FilePath output_file; base::FilePath output_file;
......
...@@ -10,9 +10,11 @@ ...@@ -10,9 +10,11 @@
#include "base/files/file_util.h" #include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h" #include "base/files/scoped_temp_dir.h"
#include "base/json/json_reader.h" #include "base/json/json_reader.h"
#include "base/json/json_writer.h"
#include "base/test/launcher/test_launcher.h" #include "base/test/launcher/test_launcher.h"
#include "base/test/launcher/unit_test_launcher.h" #include "base/test/launcher/unit_test_launcher.h"
#include "base/test/scoped_task_environment.h" #include "base/test/scoped_task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h" #include "base/threading/thread_task_runner_handle.h"
#include "testing/gmock/include/gmock/gmock.h" #include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h" #include "testing/gtest/include/gtest/gtest.h"
...@@ -56,6 +58,11 @@ class MockTestLauncher : public TestLauncher { ...@@ -56,6 +58,11 @@ class MockTestLauncher : public TestLauncher {
: TestLauncher(launcher_delegate, parallel_jobs) {} : TestLauncher(launcher_delegate, parallel_jobs) {}
void CreateAndStartThreadPool(int parallel_jobs) override {} void CreateAndStartThreadPool(int parallel_jobs) override {}
MOCK_METHOD3(LaunchChildGTestProcess,
void(scoped_refptr<TaskRunner> task_runner,
const std::vector<std::string>& test_names,
const FilePath& temp_dir));
}; };
// Simple TestLauncherDelegate mock to test TestLauncher flow. // Simple TestLauncherDelegate mock to test TestLauncher flow.
...@@ -65,12 +72,23 @@ class MockTestLauncherDelegate : public TestLauncherDelegate { ...@@ -65,12 +72,23 @@ class MockTestLauncherDelegate : public TestLauncherDelegate {
MOCK_METHOD2(WillRunTest, MOCK_METHOD2(WillRunTest,
bool(const std::string& test_case_name, bool(const std::string& test_case_name,
const std::string& test_name)); const std::string& test_name));
MOCK_METHOD2(RunTests, MOCK_METHOD6(
size_t(TestLauncher* test_launcher, ProcessTestResults,
const std::vector<std::string>& test_names)); std::vector<TestResult>(const std::vector<std::string>& test_names,
MOCK_METHOD2(RetryTests, const base::FilePath& output_file,
size_t(TestLauncher* test_launcher, const std::string& output,
const std::vector<std::string>& test_names)); const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout));
MOCK_METHOD3(GetCommandLine,
CommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir_,
FilePath* output_file_));
MOCK_METHOD1(IsPreTask, bool(const std::vector<std::string>& test_names));
MOCK_METHOD0(GetWrapper, std::string());
MOCK_METHOD0(GetLaunchOptions, int());
MOCK_METHOD0(GetTimeout, TimeDelta());
MOCK_METHOD0(GetBatchSize, size_t());
}; };
// Using MockTestLauncher to test TestLauncher. // Using MockTestLauncher to test TestLauncher.
...@@ -98,13 +116,25 @@ class TestLauncherTest : public testing::Test { ...@@ -98,13 +116,25 @@ class TestLauncherTest : public testing::Test {
} }
// Setup expected delegate calls, and which tests the delegate will return. // Setup expected delegate calls, and which tests the delegate will return.
void SetUpExpectCalls() { void SetUpExpectCalls(size_t batch_size = 10) {
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, GetTests(_)) EXPECT_CALL(delegate, GetTests(_))
.WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_), .WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_),
testing::Return(true))); testing::Return(true)));
EXPECT_CALL(delegate, WillRunTest(_, _)) EXPECT_CALL(delegate, WillRunTest(_, _))
.WillRepeatedly(testing::Return(true)); .WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, ProcessTestResults(_, _, _, _, _, _)).Times(0);
EXPECT_CALL(delegate, GetCommandLine(_, _, _))
.WillRepeatedly(testing::Return(CommandLine(CommandLine::NO_PROGRAM)));
EXPECT_CALL(delegate, GetWrapper())
.WillRepeatedly(testing::Return(std::string()));
EXPECT_CALL(delegate, IsPreTask(_)).WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, GetLaunchOptions())
.WillRepeatedly(testing::Return(true));
EXPECT_CALL(delegate, GetTimeout())
.WillRepeatedly(testing::Return(TimeDelta()));
EXPECT_CALL(delegate, GetBatchSize())
.WillRepeatedly(testing::Return(batch_size));
} }
void ReadSummary(FilePath path) { void ReadSummary(FilePath path) {
...@@ -127,18 +157,16 @@ class TestLauncherTest : public testing::Test { ...@@ -127,18 +157,16 @@ class TestLauncherTest : public testing::Test {
}; };
// Action to mock delegate invoking OnTestFinish on test launcher. // Action to mock delegate invoking OnTestFinish on test launcher.
ACTION_P2(OnTestResult, full_name, status) { ACTION_P3(OnTestResult, launcher, full_name, status) {
TestResult result = GenerateTestResult(full_name, status); TestResult result = GenerateTestResult(full_name, status);
ThreadTaskRunnerHandle::Get()->PostTask( arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
FROM_HERE, Unretained(launcher), result));
BindOnce(&TestLauncher::OnTestFinished, Unretained(arg0), result));
} }
// Action to mock delegate invoking OnTestFinish on test launcher. // Action to mock delegate invoking OnTestFinish on test launcher.
ACTION_P(OnTestResult, result) { ACTION_P2(OnTestResult, launcher, result) {
ThreadTaskRunnerHandle::Get()->PostTask( arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
FROM_HERE, Unretained(launcher), result));
BindOnce(&TestLauncher::OnTestFinished, Unretained(arg0), result));
} }
// A test and a disabled test cannot share a name. // A test and a disabled test cannot share a name.
...@@ -163,11 +191,11 @@ TEST_F(TestLauncherTest, OrphanePreTest) { ...@@ -163,11 +191,11 @@ TEST_F(TestLauncherTest, OrphanePreTest) {
EXPECT_FALSE(test_launcher.Run(command_line.get())); EXPECT_FALSE(test_launcher.Run(command_line.get()));
} }
// When There are no tests, RunLoop should not be called. // When There are no tests, delegate should not be called.
TEST_F(TestLauncherTest, EmptyTestSetPasses) { TEST_F(TestLauncherTest, EmptyTestSetPasses) {
SetUpExpectCalls(); SetUpExpectCalls();
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)).WillOnce(testing::Return(0)); EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _)).Times(0);
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -177,15 +205,17 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) { ...@@ -177,15 +205,17 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
AddMockedTests("Test", AddMockedTests("Test",
{"firstTest", "secondTest", "DISABLED_firstTestDisabled"}); {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
SetUpExpectCalls(); SetUpExpectCalls();
using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"}; std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
EXPECT_CALL(delegate, using ::testing::_;
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
tests_names.cend()))) _,
.WillOnce(::testing::DoAll( testing::ElementsAreArray(tests_names.cbegin(),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), tests_names.cend()),
OnTestResult("Test.secondTest", TestResult::TEST_SUCCESS), _))
testing::Return(2))); .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS),
OnTestResult(&test_launcher, "Test.secondTest",
TestResult::TEST_SUCCESS)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -193,13 +223,15 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) { ...@@ -193,13 +223,15 @@ TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
TEST_F(TestLauncherTest, ReorderPreTests) { TEST_F(TestLauncherTest, ReorderPreTests) {
AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"}); AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
using ::testing::_;
std::vector<std::string> tests_names = { std::vector<std::string> tests_names = {
"Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"}; "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
EXPECT_CALL(delegate, using ::testing::_;
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
tests_names.cend()))) _,
.WillOnce(testing::Return(0)); testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -211,12 +243,14 @@ TEST_F(TestLauncherTest, UsingCommandLineFilter) { ...@@ -211,12 +243,14 @@ TEST_F(TestLauncherTest, UsingCommandLineFilter) {
command_line->AppendSwitchASCII("gtest_filter", "Test*.first*"); command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
using ::testing::_; using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest"}; std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate, using ::testing::_;
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
tests_names.cend()))) _,
.WillOnce(::testing::DoAll( testing::ElementsAreArray(tests_names.cbegin(),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), tests_names.cend()),
testing::Return(1))); _))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -225,13 +259,15 @@ TEST_F(TestLauncherTest, FilterIncludePreTest) { ...@@ -225,13 +259,15 @@ TEST_F(TestLauncherTest, FilterIncludePreTest) {
AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"}); AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest"); command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
using ::testing::_;
std::vector<std::string> tests_names = {"Test.PRE_firstTest", std::vector<std::string> tests_names = {"Test.PRE_firstTest",
"Test.firstTest"}; "Test.firstTest"};
EXPECT_CALL(delegate, using ::testing::_;
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
tests_names.cend()))) _,
.WillOnce(testing::Return(0)); testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -241,11 +277,10 @@ TEST_F(TestLauncherTest, RunningMultipleIterations) { ...@@ -241,11 +277,10 @@ TEST_F(TestLauncherTest, RunningMultipleIterations) {
SetUpExpectCalls(); SetUpExpectCalls();
command_line->AppendSwitchASCII("gtest_repeat", "2"); command_line->AppendSwitchASCII("gtest_repeat", "2");
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.Times(2) .Times(2)
.WillRepeatedly(::testing::DoAll( .WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), TestResult::TEST_SUCCESS));
testing::Return(1)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -254,17 +289,16 @@ TEST_F(TestLauncherTest, SuccessOnRetryTests) { ...@@ -254,17 +289,16 @@ TEST_F(TestLauncherTest, SuccessOnRetryTests) {
AddMockedTests("Test", {"firstTest"}); AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE),
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"}; std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate, EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(), _,
tests_names.cend()))) testing::ElementsAreArray(tests_names.cbegin(),
.WillOnce(::testing::DoAll( tests_names.cend()),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), _))
testing::Return(1))); .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_FAILURE))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -274,18 +308,15 @@ TEST_F(TestLauncherTest, FailOnRetryTests) { ...@@ -274,18 +308,15 @@ TEST_F(TestLauncherTest, FailOnRetryTests) {
AddMockedTests("Test", {"firstTest"}); AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE),
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"}; std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_CALL(delegate, EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(), _,
tests_names.cend()))) testing::ElementsAreArray(tests_names.cbegin(),
.Times(3) tests_names.cend()),
.WillRepeatedly(::testing::DoAll( _))
OnTestResult("Test.firstTest", TestResult::TEST_FAILURE), .Times(4)
testing::Return(1))); .WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_FAILURE));
EXPECT_FALSE(test_launcher.Run(command_line.get())); EXPECT_FALSE(test_launcher.Run(command_line.get()));
} }
...@@ -293,23 +324,43 @@ TEST_F(TestLauncherTest, FailOnRetryTests) { ...@@ -293,23 +324,43 @@ TEST_F(TestLauncherTest, FailOnRetryTests) {
TEST_F(TestLauncherTest, RetryPreTests) { TEST_F(TestLauncherTest, RetryPreTests) {
AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"}); AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
std::vector<TestResult> results = {
GenerateTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
GenerateTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS)};
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce(::testing::DoAll(
OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
OnTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS),
testing::Return(3)));
std::vector<std::string> tests_names = {
"Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
EXPECT_CALL(delegate,
RetryTests(_, testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend())))
.WillOnce(::testing::DoAll( .WillOnce(::testing::DoAll(
OnTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS), OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
OnTestResult("Test.PRE_firstTest", TestResult::TEST_SUCCESS), TestResult::TEST_SUCCESS),
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), OnTestResult(&test_launcher, "Test.PRE_firstTest",
testing::Return(3))); TestResult::TEST_FAILURE),
OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS)));
std::vector<std::string> tests_names = {"Test.PRE_PRE_firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
TestResult::TEST_SUCCESS));
tests_names = {"Test.PRE_firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.PRE_firstTest",
TestResult::TEST_SUCCESS));
tests_names = {"Test.firstTest"};
EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
_,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
TestResult::TEST_SUCCESS));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -321,34 +372,38 @@ TEST_F(TestLauncherTest, RunDisabledTests) { ...@@ -321,34 +372,38 @@ TEST_F(TestLauncherTest, RunDisabledTests) {
SetUpExpectCalls(); SetUpExpectCalls();
command_line->AppendSwitch("gtest_also_run_disabled_tests"); command_line->AppendSwitch("gtest_also_run_disabled_tests");
command_line->AppendSwitchASCII("gtest_filter", "Test*.first*"); command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
using ::testing::_;
std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest", std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
"Test.firstTest", "Test.firstTest",
"Test.DISABLED_firstTestDisabled"}; "Test.DISABLED_firstTestDisabled"};
EXPECT_CALL(delegate, using ::testing::_;
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
tests_names.cend()))) _,
testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()),
_))
.WillOnce(::testing::DoAll( .WillOnce(::testing::DoAll(
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), OnTestResult(&test_launcher, "Test.firstTest",
OnTestResult("DISABLED_TestDisabled.firstTest",
TestResult::TEST_SUCCESS), TestResult::TEST_SUCCESS),
OnTestResult("Test.DISABLED_firstTestDisabled", OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
TestResult::TEST_SUCCESS), TestResult::TEST_SUCCESS),
testing::Return(3))); OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
TestResult::TEST_SUCCESS)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
// Disabled test should disable all pre tests // Disabled test should disable all pre tests
TEST_F(TestLauncherTest, DisablePreTests) { TEST_F(TestLauncherTest, DisablePreTests) {
AddMockedTests("Test", AddMockedTests("Test", {"DISABLED_firstTest", "PRE_PRE_firstTest",
{"DISABLED_firstTest", "PRE_PRE_firstTest", "PRE_firstTest"}); "PRE_firstTest", "secondTest"});
SetUpExpectCalls(); SetUpExpectCalls();
std::vector<std::string> tests_names = {"Test.secondTest"};
using ::testing::_; using ::testing::_;
std::vector<std::string> tests_names; EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
EXPECT_CALL(delegate, _,
RunTests(_, testing::ElementsAreArray(tests_names.cbegin(), testing::ElementsAreArray(tests_names.cbegin(),
tests_names.cend()))) tests_names.cend()),
.WillOnce(testing::Return(0)); _))
.Times(1);
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -356,21 +411,18 @@ TEST_F(TestLauncherTest, DisablePreTests) { ...@@ -356,21 +411,18 @@ TEST_F(TestLauncherTest, DisablePreTests) {
TEST_F(TestLauncherTest, FaultyShardSetup) { TEST_F(TestLauncherTest, FaultyShardSetup) {
command_line->AppendSwitchASCII("test-launcher-total-shards", "2"); command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
command_line->AppendSwitchASCII("test-launcher-shard-index", "2"); command_line->AppendSwitchASCII("test-launcher-shard-index", "2");
using ::testing::_;
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_FALSE(test_launcher.Run(command_line.get())); EXPECT_FALSE(test_launcher.Run(command_line.get()));
} }
// Shard index must be lesser than total shards // Shard index must be lesser than total shards
TEST_F(TestLauncherTest, RedirectStdio) { TEST_F(TestLauncherTest, RedirectStdio) {
AddMockedTests("Test", {"firstTest"});
SetUpExpectCalls(); SetUpExpectCalls();
command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always"); command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always");
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce(::testing::DoAll( .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
OnTestResult("Test.firstTest", TestResult::TEST_SUCCESS), TestResult::TEST_SUCCESS));
testing::Return(1)));
std::vector<std::string> tests_names = {"Test.firstTest"};
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
} }
...@@ -463,11 +515,11 @@ TEST_F(TestLauncherTest, JsonSummary) { ...@@ -463,11 +515,11 @@ TEST_F(TestLauncherTest, JsonSummary) {
TimeDelta::FromMilliseconds(50), "output_second"); TimeDelta::FromMilliseconds(50), "output_second");
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.Times(2) .Times(2)
.WillRepeatedly(::testing::DoAll(OnTestResult(first_result), .WillRepeatedly(
OnTestResult(second_result), ::testing::DoAll(OnTestResult(&test_launcher, first_result),
testing::Return(2))); OnTestResult(&test_launcher, second_result)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
// Validate the resulting JSON file is the expected output. // Validate the resulting JSON file is the expected output.
...@@ -514,9 +566,8 @@ TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) { ...@@ -514,9 +566,8 @@ TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) {
TimeDelta::FromMilliseconds(50), "output_second"); TimeDelta::FromMilliseconds(50), "output_second");
using ::testing::_; using ::testing::_;
EXPECT_CALL(delegate, RunTests(_, _)) EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _))
.WillOnce( .WillOnce(OnTestResult(&test_launcher, test_result));
::testing::DoAll(OnTestResult(test_result), testing::Return(1)));
EXPECT_TRUE(test_launcher.Run(command_line.get())); EXPECT_TRUE(test_launcher.Run(command_line.get()));
// Validate the resulting JSON file is the expected output. // Validate the resulting JSON file is the expected output.
......
...@@ -254,24 +254,21 @@ std::vector<TestResult> ProcessMissingTestResults( ...@@ -254,24 +254,21 @@ std::vector<TestResult> ProcessMissingTestResults(
return results; return results;
} }
// Interprets test results and reports to the test launcher. // Returns interpreted test results.
void ProcessTestResults(TestLauncher* test_launcher, std::vector<TestResult> UnitTestProcessTestResults(
const std::vector<std::string>& test_names, const std::vector<std::string>& test_names,
const base::FilePath& output_file, const base::FilePath& output_file,
const std::string& output, const std::string& output,
int exit_code, int exit_code,
bool was_timeout) { bool was_timeout) {
std::vector<TestResult> test_results; std::vector<TestResult> test_results;
bool crashed = false; bool crashed = false;
bool have_test_results = bool have_test_results =
ProcessGTestOutput(output_file, &test_results, &crashed); ProcessGTestOutput(output_file, &test_results, &crashed);
if (!have_test_results) { if (!have_test_results) {
test_results = return ProcessMissingTestResults(test_names, output, was_timeout,
ProcessMissingTestResults(test_names, output, was_timeout, exit_code); exit_code);
for (auto& test_result : test_results)
test_launcher->OnTestFinished(test_result);
return;
} }
// TODO(phajdan.jr): Check for duplicates and mismatches between // TODO(phajdan.jr): Check for duplicates and mismatches between
...@@ -340,145 +337,8 @@ void ProcessTestResults(TestLauncher* test_launcher, ...@@ -340,145 +337,8 @@ void ProcessTestResults(TestLauncher* test_launcher,
for (auto& i : final_results) { for (auto& i : final_results) {
// Fix the output snippet after possible changes to the test result. // Fix the output snippet after possible changes to the test result.
i.output_snippet = GetTestOutputSnippet(i, output); i.output_snippet = GetTestOutputSnippet(i, output);
test_launcher->OnTestFinished(i);
}
}
class UnitTestProcessLifetimeObserver : public ProcessLifetimeObserver {
public:
~UnitTestProcessLifetimeObserver() override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
} }
return final_results;
TestLauncher* test_launcher() { return test_launcher_; }
UnitTestPlatformDelegate* platform_delegate() { return platform_delegate_; }
const std::vector<std::string>& test_names() { return test_names_; }
int launch_flags() { return launch_flags_; }
const FilePath& output_file() { return output_file_; }
const FilePath& flag_file() { return flag_file_; }
protected:
UnitTestProcessLifetimeObserver(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file)
: ProcessLifetimeObserver(),
test_launcher_(test_launcher),
platform_delegate_(platform_delegate),
test_names_(test_names),
launch_flags_(launch_flags),
output_file_(output_file),
flag_file_(flag_file) {}
SEQUENCE_CHECKER(sequence_checker_);
private:
TestLauncher* const test_launcher_;
UnitTestPlatformDelegate* const platform_delegate_;
const std::vector<std::string> test_names_;
const int launch_flags_;
const FilePath output_file_;
const FilePath flag_file_;
DISALLOW_COPY_AND_ASSIGN(UnitTestProcessLifetimeObserver);
};
class ParallelUnitTestProcessLifetimeObserver
: public UnitTestProcessLifetimeObserver {
public:
ParallelUnitTestProcessLifetimeObserver(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file)
: UnitTestProcessLifetimeObserver(test_launcher,
platform_delegate,
test_names,
launch_flags,
output_file,
flag_file) {}
~ParallelUnitTestProcessLifetimeObserver() override = default;
private:
// ProcessLifetimeObserver:
void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override;
DISALLOW_COPY_AND_ASSIGN(ParallelUnitTestProcessLifetimeObserver);
};
void ParallelUnitTestProcessLifetimeObserver::OnCompleted(
int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ProcessTestResults(test_launcher(), test_names(), output_file(), output,
exit_code, was_timeout);
// The temporary file's directory is also temporary.
DeleteFile(output_file().DirName(), true);
if (!flag_file().empty())
DeleteFile(flag_file(), false);
}
class SerialUnitTestProcessLifetimeObserver
: public UnitTestProcessLifetimeObserver {
public:
SerialUnitTestProcessLifetimeObserver(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags,
const FilePath& output_file,
const FilePath& flag_file,
std::vector<std::string>&& next_test_names)
: UnitTestProcessLifetimeObserver(test_launcher,
platform_delegate,
test_names,
launch_flags,
output_file,
flag_file),
next_test_names_(std::move(next_test_names)) {}
~SerialUnitTestProcessLifetimeObserver() override = default;
private:
// ProcessLifetimeObserver:
void OnCompleted(int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override;
std::vector<std::string> next_test_names_;
DISALLOW_COPY_AND_ASSIGN(SerialUnitTestProcessLifetimeObserver);
};
void SerialUnitTestProcessLifetimeObserver::OnCompleted(
int exit_code,
TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ProcessTestResults(test_launcher(), test_names(), output_file(), output,
exit_code, was_timeout);
// The temporary file's directory is also temporary.
DeleteFile(output_file().DirName(), true);
if (!flag_file().empty())
DeleteFile(flag_file(), false);
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&RunUnitTestsSerially, test_launcher(), platform_delegate(),
std::move(next_test_names_), launch_flags()));
} }
} // namespace } // namespace
...@@ -534,77 +394,6 @@ int LaunchUnitTests(int argc, ...@@ -534,77 +394,6 @@ int LaunchUnitTests(int argc,
} }
#endif // defined(OS_WIN) #endif // defined(OS_WIN)
void RunUnitTestsSerially(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags) {
if (test_names.empty())
return;
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
FilePath output_file;
CHECK(platform_delegate->CreateResultsFile(&output_file));
FilePath flag_file;
platform_delegate->CreateTemporaryFile(&flag_file);
auto observer = std::make_unique<SerialUnitTestProcessLifetimeObserver>(
test_launcher, platform_delegate,
std::vector<std::string>(1, test_names.back()), launch_flags, output_file,
flag_file,
std::vector<std::string>(test_names.begin(), test_names.end() - 1));
CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
observer->test_names(), output_file, flag_file));
TestLauncher::LaunchOptions launch_options;
launch_options.flags = launch_flags;
test_launcher->LaunchChildGTestProcess(
cmd_line, platform_delegate->GetWrapperForChildGTestProcess(),
TestTimeouts::test_launcher_timeout(), launch_options,
std::move(observer));
}
void RunUnitTestsBatch(
TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags) {
if (test_names.empty())
return;
// Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple
// processes in parallel.
FilePath output_file;
CHECK(platform_delegate->CreateResultsFile(&output_file));
FilePath flag_file;
platform_delegate->CreateTemporaryFile(&flag_file);
auto observer = std::make_unique<ParallelUnitTestProcessLifetimeObserver>(
test_launcher, platform_delegate, test_names, launch_flags, output_file,
flag_file);
CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
test_names, output_file, flag_file));
// Adjust the timeout depending on how many tests we're running
// (note that e.g. the last batch of tests will be smaller).
// TODO(phajdan.jr): Consider an adaptive timeout, which can change
// depending on how many tests ran and how many remain.
// Note: do NOT parse child's stdout to do that, it's known to be
// unreliable (e.g. buffering issues can mix up the output).
TimeDelta timeout = test_names.size() * TestTimeouts::test_launcher_timeout();
TestLauncher::LaunchOptions options;
options.flags = launch_flags;
test_launcher->LaunchChildGTestProcess(
cmd_line, platform_delegate->GetWrapperForChildGTestProcess(), timeout,
options, std::move(observer));
}
DefaultUnitTestPlatformDelegate::DefaultUnitTestPlatformDelegate() = default; DefaultUnitTestPlatformDelegate::DefaultUnitTestPlatformDelegate() = default;
bool DefaultUnitTestPlatformDelegate::GetTests( bool DefaultUnitTestPlatformDelegate::GetTests(
...@@ -613,18 +402,21 @@ bool DefaultUnitTestPlatformDelegate::GetTests( ...@@ -613,18 +402,21 @@ bool DefaultUnitTestPlatformDelegate::GetTests(
return true; return true;
} }
bool DefaultUnitTestPlatformDelegate::CreateResultsFile(base::FilePath* path) { bool DefaultUnitTestPlatformDelegate::CreateResultsFile(
if (!CreateNewTempDirectory(FilePath::StringType(), path)) const base::FilePath& temp_dir,
base::FilePath* path) {
if (!CreateTemporaryDirInDir(temp_dir, FilePath::StringType(), path))
return false; return false;
*path = path->AppendASCII("test_results.xml"); *path = path->AppendASCII("test_results.xml");
return true; return true;
} }
bool DefaultUnitTestPlatformDelegate::CreateTemporaryFile( bool DefaultUnitTestPlatformDelegate::CreateTemporaryFile(
const base::FilePath& temp_dir,
base::FilePath* path) { base::FilePath* path) {
if (!temp_dir_.IsValid() && !temp_dir_.CreateUniqueTempDir()) if (temp_dir.empty())
return false; return false;
return CreateTemporaryFileInDir(temp_dir_.GetPath(), path); return CreateTemporaryFileInDir(temp_dir, path);
} }
CommandLine DefaultUnitTestPlatformDelegate::GetCommandLineForChildGTestProcess( CommandLine DefaultUnitTestPlatformDelegate::GetCommandLineForChildGTestProcess(
...@@ -678,38 +470,48 @@ bool UnitTestLauncherDelegate::WillRunTest(const std::string& test_case_name, ...@@ -678,38 +470,48 @@ bool UnitTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
return true; return true;
} }
size_t UnitTestLauncherDelegate::RunTests( std::vector<TestResult> UnitTestLauncherDelegate::ProcessTestResults(
TestLauncher* test_launcher, const std::vector<std::string>& test_names,
const std::vector<std::string>& test_names) { const base::FilePath& output_file,
DCHECK(thread_checker_.CalledOnValidThread()); const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) {
return UnitTestProcessTestResults(test_names, output_file, output, exit_code,
was_timeout);
}
int launch_flags = use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0; CommandLine UnitTestLauncherDelegate::GetCommandLine(
const std::vector<std::string>& test_names,
const FilePath& temp_dir,
FilePath* output_file) {
CHECK(!test_names.empty());
std::vector<std::string> batch; // Create a dedicated temporary directory to store the xml result data
for (const auto& i : test_names) { // per run to ensure clean state and make it possible to launch multiple
batch.push_back(i); // processes in parallel.
CHECK(platform_delegate_->CreateResultsFile(temp_dir, output_file));
FilePath flag_file;
platform_delegate_->CreateTemporaryFile(temp_dir, &flag_file);
// Use 0 to indicate unlimited batch size. return CommandLine(platform_delegate_->GetCommandLineForChildGTestProcess(
if (batch.size() >= batch_limit_ && batch_limit_ != 0) { test_names, *output_file, flag_file));
RunUnitTestsBatch(test_launcher, platform_delegate_, batch, launch_flags); }
batch.clear();
}
}
RunUnitTestsBatch(test_launcher, platform_delegate_, batch, launch_flags); std::string UnitTestLauncherDelegate::GetWrapper() {
return platform_delegate_->GetWrapperForChildGTestProcess();
}
int UnitTestLauncherDelegate::GetLaunchOptions() {
return use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0;
}
return test_names.size(); TimeDelta UnitTestLauncherDelegate::GetTimeout() {
return TestTimeouts::test_launcher_timeout();
} }
size_t UnitTestLauncherDelegate::RetryTests( size_t UnitTestLauncherDelegate::GetBatchSize() {
TestLauncher* test_launcher, return batch_limit_;
const std::vector<std::string>& test_names) {
ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
BindOnce(&RunUnitTestsSerially, test_launcher, platform_delegate_,
test_names,
use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0));
return test_names.size();
} }
} // namespace base } // namespace base
...@@ -60,11 +60,13 @@ class UnitTestPlatformDelegate { ...@@ -60,11 +60,13 @@ class UnitTestPlatformDelegate {
// Called to create a temporary for storing test results. The delegate // Called to create a temporary for storing test results. The delegate
// must put the resulting path in |path| and return true on success. // must put the resulting path in |path| and return true on success.
virtual bool CreateResultsFile(base::FilePath* path) = 0; virtual bool CreateResultsFile(const base::FilePath& temp_dir,
base::FilePath* path) = 0;
// Called to create a new temporary file. The delegate must put the resulting // Called to create a new temporary file. The delegate must put the resulting
// path in |path| and return true on success. // path in |path| and return true on success.
virtual bool CreateTemporaryFile(base::FilePath* path) = 0; virtual bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) = 0;
// Returns command line for child GTest process based on the command line // Returns command line for child GTest process based on the command line
// of current process. |test_names| is a vector of test full names // of current process. |test_names| is a vector of test full names
...@@ -95,9 +97,11 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate { ...@@ -95,9 +97,11 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate {
bool GetTests(std::vector<TestIdentifier>* output) override; bool GetTests(std::vector<TestIdentifier>* output) override;
bool CreateResultsFile(base::FilePath* path) override; bool CreateResultsFile(const base::FilePath& temp_dir,
base::FilePath* path) override;
bool CreateTemporaryFile(base::FilePath* path) override; bool CreateTemporaryFile(const base::FilePath& temp_dir,
base::FilePath* path) override;
CommandLine GetCommandLineForChildGTestProcess( CommandLine GetCommandLineForChildGTestProcess(
const std::vector<std::string>& test_names, const std::vector<std::string>& test_names,
...@@ -111,18 +115,6 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate { ...@@ -111,18 +115,6 @@ class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate {
DISALLOW_COPY_AND_ASSIGN(DefaultUnitTestPlatformDelegate); DISALLOW_COPY_AND_ASSIGN(DefaultUnitTestPlatformDelegate);
}; };
// Runs tests serially, each in its own process.
void RunUnitTestsSerially(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags);
// Runs tests in batches (each batch in its own process).
void RunUnitTestsBatch(TestLauncher* test_launcher,
UnitTestPlatformDelegate* platform_delegate,
const std::vector<std::string>& test_names,
int launch_flags);
// Test launcher delegate for unit tests (mostly to support batching). // Test launcher delegate for unit tests (mostly to support batching).
class UnitTestLauncherDelegate : public TestLauncherDelegate { class UnitTestLauncherDelegate : public TestLauncherDelegate {
public: public:
...@@ -136,10 +128,26 @@ class UnitTestLauncherDelegate : public TestLauncherDelegate { ...@@ -136,10 +128,26 @@ class UnitTestLauncherDelegate : public TestLauncherDelegate {
bool GetTests(std::vector<TestIdentifier>* output) override; bool GetTests(std::vector<TestIdentifier>* output) override;
bool WillRunTest(const std::string& test_case_name, bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) override; const std::string& test_name) override;
size_t RunTests(TestLauncher* test_launcher,
const std::vector<std::string>& test_names) override; std::vector<TestResult> ProcessTestResults(
size_t RetryTests(TestLauncher* test_launcher, const std::vector<std::string>& test_names,
const std::vector<std::string>& test_names) override; const base::FilePath& output_file,
const std::string& output,
const base::TimeDelta& elapsed_time,
int exit_code,
bool was_timeout) override;
CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const FilePath& temp_dir,
FilePath* output_file) override;
std::string GetWrapper() override;
int GetLaunchOptions() override;
TimeDelta GetTimeout() override;
size_t GetBatchSize() override;
ThreadChecker thread_checker_; ThreadChecker thread_checker_;
......
...@@ -19,64 +19,6 @@ ...@@ -19,64 +19,6 @@
namespace base { namespace base {
namespace { namespace {
// Unit tests to validate DefaultUnitTestPlatformDelegate implementations.
class DefaultUnitTestPlatformDelegateTester : public testing::Test {
protected:
UnitTestPlatformDelegate* platformDelegate;
FilePath flag_path;
FilePath output_path;
std::vector<std::string> test_names;
void SetUp() override { platformDelegate = &defaultPlatform_; }
private:
DefaultUnitTestPlatformDelegate defaultPlatform_;
};
// Call fails when flag_file does not exist.
TEST_F(DefaultUnitTestPlatformDelegateTester, FlagPathCheckFail) {
ASSERT_CHECK_DEATH(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
}
// Validate flags are set correctly in by the delegate.
TEST_F(DefaultUnitTestPlatformDelegateTester,
GetCommandLineForChildGTestProcess) {
ASSERT_TRUE(platformDelegate->CreateResultsFile(&output_path));
ASSERT_TRUE(platformDelegate->CreateTemporaryFile(&flag_path));
CommandLine cmd_line(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
EXPECT_EQ(cmd_line.GetSwitchValueASCII("test-launcher-output"),
output_path.MaybeAsASCII());
EXPECT_EQ(cmd_line.GetSwitchValueASCII("gtest_flagfile"),
flag_path.MaybeAsASCII());
EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
}
// Validate the tests are saved correctly in flag file under
// the "--gtest_filter" flag.
TEST_F(DefaultUnitTestPlatformDelegateTester, GetCommandLineFilterTest) {
test_names.push_back("Test1");
test_names.push_back("Test2");
ASSERT_TRUE(platformDelegate->CreateResultsFile(&output_path));
ASSERT_TRUE(platformDelegate->CreateTemporaryFile(&flag_path));
CommandLine cmd_line(platformDelegate->GetCommandLineForChildGTestProcess(
test_names, output_path, flag_path));
const int size = 2048;
std::string content;
ASSERT_TRUE(ReadFileToStringWithMaxSize(flag_path, &content, size));
EXPECT_EQ(content.find("--gtest_filter="), 0u);
base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
std::vector<std::string> gtest_filter_tests =
SplitString(content, ":", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
for (unsigned i = 0; i < test_names.size(); i++) {
EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
}
}
// Mock TestLauncher to validate LaunchChildGTestProcess // Mock TestLauncher to validate LaunchChildGTestProcess
// is called correctly inside the test launcher delegate. // is called correctly inside the test launcher delegate.
class MockTestLauncher : public TestLauncher { class MockTestLauncher : public TestLauncher {
...@@ -85,81 +27,52 @@ class MockTestLauncher : public TestLauncher { ...@@ -85,81 +27,52 @@ class MockTestLauncher : public TestLauncher {
size_t parallel_jobs) size_t parallel_jobs)
: TestLauncher(launcher_delegate, parallel_jobs) {} : TestLauncher(launcher_delegate, parallel_jobs) {}
MOCK_METHOD5(LaunchChildGTestProcess, MOCK_METHOD3(LaunchChildGTestProcess,
void(const CommandLine& command_line, void(scoped_refptr<TaskRunner> task_runner,
const std::string& wrapper, const std::vector<std::string>& test_names,
TimeDelta timeout, const FilePath& temp_dir));
const LaunchOptions& options,
std::unique_ptr<ProcessLifetimeObserver> observer));
}; };
// Unit tests to validate UnitTestLauncherDelegateTester implementations. // Unit tests to validate UnitTestLauncherDelegateTester implementations.
class UnitTestLauncherDelegateTester : public testing::Test { class UnitTestLauncherDelegateTester : public testing::Test {
protected: protected:
TestLauncherDelegate* launcherDelegate;
MockTestLauncher* launcher;
std::vector<std::string> tests;
void SetUp() override { tests.assign(100, "Test"); }
// Setup test launcher delegate with a particular batch size.
void SetUpLauncherDelegate(size_t batch_size) {
launcherDelegate =
new UnitTestLauncherDelegate(&defaultPlatform, batch_size, true);
launcher = new MockTestLauncher(launcherDelegate, batch_size);
}
// Validate LaunchChildGTestProcess is called x number of times.
void ValidateChildGTestProcessCalls(int times_called) {
using ::testing::_;
EXPECT_CALL(*launcher, LaunchChildGTestProcess(_, _, _, _, _))
.Times(times_called);
}
void TearDown() override {
delete launcherDelegate;
delete launcher;
}
private:
DefaultUnitTestPlatformDelegate defaultPlatform; DefaultUnitTestPlatformDelegate defaultPlatform;
}; };
// Validate 0 batch size corresponds to unlimited batch size. // Validate delegate produces correct command line.
TEST_F(UnitTestLauncherDelegateTester, RunTestsWithUnlimitedBatchSize) { TEST_F(UnitTestLauncherDelegateTester, GetCommandLine) {
SetUpLauncherDelegate(0); UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 10u, true);
TestLauncherDelegate* delegate_ptr = &launcher_delegate;
ValidateChildGTestProcessCalls(1);
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size());
}
// Validate edge case, no tests to run.
TEST_F(UnitTestLauncherDelegateTester, RunTestsWithEmptyTests) {
SetUpLauncherDelegate(0);
ValidateChildGTestProcessCalls(0); std::vector<std::string> test_names(5, "Tests");
tests.clear(); base::FilePath temp_dir;
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size()); base::FilePath result_file;
} CreateNewTempDirectory(FilePath::StringType(), &temp_dir);
// Validate delegate slices batch size correctly. CommandLine cmd_line =
TEST_F(UnitTestLauncherDelegateTester, RunTestsBatchSize10) { delegate_ptr->GetCommandLine(test_names, temp_dir, &result_file);
SetUpLauncherDelegate(10); EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
EXPECT_EQ(cmd_line.GetSwitchValuePath("test-launcher-output"), result_file);
ValidateChildGTestProcessCalls(10); const int size = 2048;
EXPECT_EQ(launcherDelegate->RunTests(launcher, tests), tests.size()); std::string content;
ASSERT_TRUE(ReadFileToStringWithMaxSize(
cmd_line.GetSwitchValuePath("gtest_flagfile"), &content, size));
EXPECT_EQ(content.find("--gtest_filter="), 0u);
base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
std::vector<std::string> gtest_filter_tests =
SplitString(content, ":", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
for (unsigned i = 0; i < test_names.size(); i++) {
EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
}
} }
// ValidateRetryTests will only kick-off one run. // Validate delegate sets batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, RetryTests) { TEST_F(UnitTestLauncherDelegateTester, BatchSize) {
// ScopedTaskEviorment is needed since RetryTests uses thread task UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 15u, true);
// runner to start. TestLauncherDelegate* delegate_ptr = &launcher_delegate;
test::ScopedTaskEnvironment task_environment; EXPECT_EQ(delegate_ptr->GetBatchSize(), 15u);
SetUpLauncherDelegate(10);
ValidateChildGTestProcessCalls(1);
EXPECT_EQ(launcherDelegate->RetryTests(launcher, tests), tests.size());
RunLoop().RunUntilIdle();
} }
} // namespace } // namespace
......
...@@ -82,13 +82,6 @@ TestLauncherDelegate* g_launcher_delegate = nullptr; ...@@ -82,13 +82,6 @@ TestLauncherDelegate* g_launcher_delegate = nullptr;
ContentMainParams* g_params = nullptr; ContentMainParams* g_params = nullptr;
#endif #endif
std::string RemoveAnyPrePrefixes(const std::string& test_name) {
std::string result(test_name);
base::ReplaceSubstringsAfterOffset(
&result, 0, kPreTestPrefix, base::StringPiece());
return result;
}
void PrintUsage() { void PrintUsage() {
fprintf(stdout, fprintf(stdout,
"Runs tests using the gtest framework, each batch of tests being\n" "Runs tests using the gtest framework, each batch of tests being\n"
...@@ -135,70 +128,28 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate { ...@@ -135,70 +128,28 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
public: public:
explicit WrapperTestLauncherDelegate( explicit WrapperTestLauncherDelegate(
content::TestLauncherDelegate* launcher_delegate) content::TestLauncherDelegate* launcher_delegate)
: launcher_delegate_(launcher_delegate) { : launcher_delegate_(launcher_delegate) {}
CHECK(temp_dir_.CreateUniqueTempDir());
}
// base::TestLauncherDelegate: // base::TestLauncherDelegate:
bool GetTests(std::vector<base::TestIdentifier>* output) override; bool GetTests(std::vector<base::TestIdentifier>* output) override;
bool WillRunTest(const std::string& test_case_name, bool WillRunTest(const std::string& test_case_name,
const std::string& test_name) override; const std::string& test_name) override;
size_t RunTests(base::TestLauncher* test_launcher, base::CommandLine GetCommandLine(const std::vector<std::string>& test_names,
const std::vector<std::string>& test_names) override; const base::FilePath& temp_dir,
size_t RetryTests(base::TestLauncher* test_launcher, base::FilePath* output_file) override;
const std::vector<std::string>& test_names) override;
private:
class ChildProcessLifetimeObserver : public base::ProcessLifetimeObserver {
public:
ChildProcessLifetimeObserver(
WrapperTestLauncherDelegate* test_launcher_delegate,
base::TestLauncher* test_launcher,
std::vector<std::string>&& next_test_names,
const std::string& test_name,
const base::FilePath& output_file)
: base::ProcessLifetimeObserver(),
test_launcher_delegate_(test_launcher_delegate),
test_launcher_(test_launcher),
next_test_names_(std::move(next_test_names)),
test_name_(test_name),
output_file_(output_file) {}
~ChildProcessLifetimeObserver() override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
private:
void OnTimedOut(const base::CommandLine& command_line) override {
test_launcher_delegate_->OnTestTimedOut(command_line);
}
void OnCompleted(int exit_code, size_t GetBatchSize() override;
base::TimeDelta elapsed_time,
bool was_timeout,
const std::string& output) override {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
test_launcher_delegate_->GTestCallback(
test_launcher_, next_test_names_, test_name_, output_file_, exit_code,
elapsed_time, was_timeout, output);
}
SEQUENCE_CHECKER(sequence_checker_); std::string GetWrapper() override;
WrapperTestLauncherDelegate* test_launcher_delegate_;
base::TestLauncher* test_launcher_;
std::vector<std::string> next_test_names_;
std::string test_name_;
base::FilePath output_file_;
DISALLOW_COPY_AND_ASSIGN(ChildProcessLifetimeObserver); int GetLaunchOptions() override;
};
void DoRunTests(base::TestLauncher* test_launcher, base::TimeDelta GetTimeout() override;
const std::vector<std::string>& test_names);
private:
// Relays timeout notification from the TestLauncher (by way of a // Relays timeout notification from the TestLauncher (by way of a
// ProcessLifetimeObserver) to the caller's content::TestLauncherDelegate. // ProcessLifetimeObserver) to the caller's content::TestLauncherDelegate.
void OnTestTimedOut(const base::CommandLine& command_line); void OnTestTimedOut(const base::CommandLine& command_line) override;
// Callback to receive result of a test. // Callback to receive result of a test.
// |output_file| is a path to xml file written by test-launcher // |output_file| is a path to xml file written by test-launcher
...@@ -206,28 +157,17 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate { ...@@ -206,28 +157,17 @@ class WrapperTestLauncherDelegate : public base::TestLauncherDelegate {
// EXPECT/ASSERT/DCHECK statements. Test launcher parses that // EXPECT/ASSERT/DCHECK statements. Test launcher parses that
// file to get additional information about test run (status, // file to get additional information about test run (status,
// error-messages, stack-traces and file/line for failures). // error-messages, stack-traces and file/line for failures).
void GTestCallback(base::TestLauncher* test_launcher, std::vector<base::TestResult> ProcessTestResults(
const std::vector<std::string>& test_names, const std::vector<std::string>& test_names,
const std::string& test_name, const base::FilePath& output_file,
const base::FilePath& output_file, const std::string& output,
int exit_code, const base::TimeDelta& elapsed_time,
const base::TimeDelta& elapsed_time, int exit_code,
bool was_timeout, bool was_timeout) override;
const std::string& output);
content::TestLauncherDelegate* launcher_delegate_; content::TestLauncherDelegate* launcher_delegate_;
// Store unique data directory prefix for test names (without PRE_ prefixes).
// PRE_ tests and tests that depend on them must share the same
// data directory. Using test name as directory name leads to too long
// names (exceeding UNIX_PATH_MAX, which creates a problem with
// process_singleton_linux). Create a randomly-named temporary directory
// and keep track of the names so that PRE_ tests can still re-use them.
typedef std::map<std::string, base::FilePath> UserDataDirMap;
UserDataDirMap user_data_dir_map_;
// Temporary directory for user data directories.
base::ScopedTempDir temp_dir_;
DISALLOW_COPY_AND_ASSIGN(WrapperTestLauncherDelegate); DISALLOW_COPY_AND_ASSIGN(WrapperTestLauncherDelegate);
}; };
...@@ -253,81 +193,26 @@ bool WrapperTestLauncherDelegate::WillRunTest(const std::string& test_case_name, ...@@ -253,81 +193,26 @@ bool WrapperTestLauncherDelegate::WillRunTest(const std::string& test_case_name,
return true; return true;
} }
size_t WrapperTestLauncherDelegate::RunTests( size_t WrapperTestLauncherDelegate::GetBatchSize() {
base::TestLauncher* test_launcher, return 1u;
const std::vector<std::string>& test_names) {
user_data_dir_map_.clear();
std::vector<std::string> test_list;
for (const std::string& test_name : test_names) {
// Stack all dependent tests and run them sequentially.
test_list.push_back(test_name);
if (!IsPreTestName(test_name)) {
if (!base::Contains(user_data_dir_map_, test_name)) {
base::FilePath temp_dir;
CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
FILE_PATH_LITERAL("d"), &temp_dir));
user_data_dir_map_[test_name] = temp_dir;
}
DoRunTests(test_launcher, test_list);
test_list.clear();
}
}
return test_names.size();
}
size_t WrapperTestLauncherDelegate::RetryTests(
base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names) {
// Discard user data directories from any previous runs. Start with
// fresh state.
for (const auto& it : user_data_dir_map_) {
// Delete temporary directories now to avoid using too much space in /tmp.
if (!base::DeleteFile(it.second, true)) {
LOG(WARNING) << "Failed to delete " << it.second.value();
}
}
user_data_dir_map_.clear();
for (const std::string& full_name : test_names) {
// Make sure PRE_ tests and tests that depend on them share the same
// data directory - based it on the test name without prefixes.
std::string test_name_no_pre(RemoveAnyPrePrefixes(full_name));
if (!base::Contains(user_data_dir_map_, test_name_no_pre)) {
base::FilePath temp_dir;
CHECK(base::CreateTemporaryDirInDir(temp_dir_.GetPath(),
FILE_PATH_LITERAL("d"), &temp_dir));
user_data_dir_map_[test_name_no_pre] = temp_dir;
}
}
DoRunTests(test_launcher, test_names);
return test_names.size();
} }
void WrapperTestLauncherDelegate::DoRunTests( base::CommandLine WrapperTestLauncherDelegate::GetCommandLine(
base::TestLauncher* test_launcher, const std::vector<std::string>& test_names,
const std::vector<std::string>& test_names) { const base::FilePath& temp_dir,
if (test_names.empty()) base::FilePath* output_file) {
return; DCHECK_EQ(1u, test_names.size());
std::string test_name(test_names.front()); std::string test_name(test_names.front());
std::vector<std::string> test_names_copy( // Chained pre tests must share the same temp directory,
test_names.begin() + 1, test_names.end()); // TestLauncher should guarantee that for the delegate.
base::FilePath user_data_dir = temp_dir.AppendASCII("user_data");
std::string test_name_no_pre(RemoveAnyPrePrefixes(test_name)); CreateDirectory(user_data_dir);
base::CommandLine cmd_line(*base::CommandLine::ForCurrentProcess()); base::CommandLine cmd_line(*base::CommandLine::ForCurrentProcess());
base::TestLauncher::LaunchOptions test_launch_options;
test_launch_options.flags = base::TestLauncher::USE_JOB_OBJECTS |
base::TestLauncher::ALLOW_BREAKAWAY_FROM_JOB;
launcher_delegate_->PreRunTest(); launcher_delegate_->PreRunTest();
CHECK(launcher_delegate_->AdjustChildProcessCommandLine( CHECK(launcher_delegate_->AdjustChildProcessCommandLine(&cmd_line,
&cmd_line, user_data_dir_map_[test_name_no_pre])); user_data_dir));
base::CommandLine new_cmd_line(cmd_line.GetProgram()); base::CommandLine new_cmd_line(cmd_line.GetProgram());
base::CommandLine::SwitchMap switches = cmd_line.GetSwitches(); base::CommandLine::SwitchMap switches = cmd_line.GetSwitches();
// Strip out gtest_output flag because otherwise we would overwrite results // Strip out gtest_output flag because otherwise we would overwrite results
// of the other tests. // of the other tests.
switches.erase(base::kGTestOutputFlag); switches.erase(base::kGTestOutputFlag);
...@@ -335,12 +220,11 @@ void WrapperTestLauncherDelegate::DoRunTests( ...@@ -335,12 +220,11 @@ void WrapperTestLauncherDelegate::DoRunTests(
// Create a dedicated temporary directory to store the xml result data // Create a dedicated temporary directory to store the xml result data
// per run to ensure clean state and make it possible to launch multiple // per run to ensure clean state and make it possible to launch multiple
// processes in parallel. // processes in parallel.
base::FilePath output_file; CHECK(base::CreateTemporaryDirInDir(temp_dir, FILE_PATH_LITERAL("results"),
CHECK(base::CreateTemporaryDirInDir( output_file));
temp_dir_.GetPath(), FILE_PATH_LITERAL("results"), &output_file)); *output_file = output_file->AppendASCII("test_results.xml");
output_file = output_file.AppendASCII("test_results.xml");
new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, output_file); new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, *output_file);
for (base::CommandLine::SwitchMap::const_iterator iter = switches.begin(); for (base::CommandLine::SwitchMap::const_iterator iter = switches.begin();
iter != switches.end(); ++iter) { iter != switches.end(); ++iter) {
...@@ -352,18 +236,21 @@ void WrapperTestLauncherDelegate::DoRunTests( ...@@ -352,18 +236,21 @@ void WrapperTestLauncherDelegate::DoRunTests(
new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests"); new_cmd_line.AppendSwitch("gtest_also_run_disabled_tests");
new_cmd_line.AppendSwitchASCII("gtest_filter", test_name); new_cmd_line.AppendSwitchASCII("gtest_filter", test_name);
new_cmd_line.AppendSwitch(kSingleProcessTestsFlag); new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);
return new_cmd_line;
}
std::string WrapperTestLauncherDelegate::GetWrapper() {
char* browser_wrapper = getenv("BROWSER_WRAPPER"); char* browser_wrapper = getenv("BROWSER_WRAPPER");
return browser_wrapper ? browser_wrapper : std::string();
}
auto observer = std::make_unique<ChildProcessLifetimeObserver>( int WrapperTestLauncherDelegate::GetLaunchOptions() {
this, test_launcher, std::move(test_names_copy), test_name, output_file); return base::TestLauncher::USE_JOB_OBJECTS |
base::TestLauncher::ALLOW_BREAKAWAY_FROM_JOB;
}
// Must use test_launcher_timeout() here because this process is allowed to base::TimeDelta WrapperTestLauncherDelegate::GetTimeout() {
// assume that it can use an entire action_max_timeout() in its lifespan. return TestTimeouts::test_launcher_timeout();
test_launcher->LaunchChildGTestProcess(
new_cmd_line, browser_wrapper ? browser_wrapper : std::string(),
TestTimeouts::test_launcher_timeout(), test_launch_options,
std::move(observer));
} }
void WrapperTestLauncherDelegate::OnTestTimedOut( void WrapperTestLauncherDelegate::OnTestTimedOut(
...@@ -371,16 +258,16 @@ void WrapperTestLauncherDelegate::OnTestTimedOut( ...@@ -371,16 +258,16 @@ void WrapperTestLauncherDelegate::OnTestTimedOut(
launcher_delegate_->OnTestTimedOut(command_line); launcher_delegate_->OnTestTimedOut(command_line);
} }
void WrapperTestLauncherDelegate::GTestCallback( std::vector<base::TestResult> WrapperTestLauncherDelegate::ProcessTestResults(
base::TestLauncher* test_launcher,
const std::vector<std::string>& test_names, const std::vector<std::string>& test_names,
const std::string& test_name,
const base::FilePath& output_file, const base::FilePath& output_file,
int exit_code, const std::string& output,
const base::TimeDelta& elapsed_time, const base::TimeDelta& elapsed_time,
bool was_timeout, int exit_code,
const std::string& output) { bool was_timeout) {
base::TestResult result; base::TestResult result;
DCHECK_EQ(1u, test_names.size());
std::string test_name = test_names.front();
result.full_name = test_name; result.full_name = test_name;
bool crashed = false; bool crashed = false;
...@@ -430,19 +317,7 @@ void WrapperTestLauncherDelegate::GTestCallback( ...@@ -430,19 +317,7 @@ void WrapperTestLauncherDelegate::GTestCallback(
launcher_delegate_->PostRunTest(&result); launcher_delegate_->PostRunTest(&result);
// No other tests depend on this, we can delete the temporary directory now. return std::vector<base::TestResult>({result});
// Do so to avoid too many temporary files using lots of disk space.
if (base::Contains(user_data_dir_map_, test_name)) {
if (!base::DeleteFile(user_data_dir_map_[test_name], true)) {
LOG(WARNING) << "Failed to delete "
<< user_data_dir_map_[test_name].value();
}
user_data_dir_map_.erase(test_name);
}
test_launcher->OnTestFinished(result);
DoRunTests(test_launcher, test_names);
} }
} // namespace } // namespace
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment