Commit 18fa16bb authored by Xiyuan Xia's avatar Xiyuan Xia Committed by Commit Bot

Make gtest perf test generate LUCI test result json

Make the perf test generate a LUCI test result json when tracing is
enabled so that the performance test script can pick it up and extract
metrics from the trace file.

Test result json follows the TestResultEntry spec in LUCI Test
Results format (go/luci-test-results-design). Outputting in
this format will allow reusing the code for results processor (
https://docs.google.com/document/d/1GYiqUJGxdUWtg9YQMXEXSILylG4J2wpiGIZM8F-HD_U
) for gtest perf tests.

Bug: 945711
Change-Id: I405b3aeab1217d5f87a9c7d3eba8f5e8eabac4dc
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1790122
Commit-Queue: Xiyuan Xia <xiyuan@chromium.org>
Reviewed-by: default avatarScott Violet <sky@chromium.org>
Reviewed-by: default avatarBrian Sheedy <bsheedy@chromium.org>
Reviewed-by: default avatarJuan Antonio Navarro Pérez <perezju@chromium.org>
Reviewed-by: default avatarCaleb Rouleau <crouleau@chromium.org>
Reviewed-by: default avatarJao-ke Chin-Lee <jchinlee@chromium.org>
Cr-Commit-Position: refs/heads/master@{#705302}
parent ddac8d88
......@@ -5,6 +5,7 @@
#include "ash/shell/content/test/ash_content_test.h"
#include <utility>
#include <vector>
#include "ash/public/cpp/shelf_config.h"
#include "ash/shell.h"
......@@ -27,6 +28,7 @@
#include "base/system/sys_info.h"
#include "base/task/post_task.h"
#include "base/test/test_file_util.h"
#include "base/threading/thread_restrictions.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "chromeos/constants/chromeos_switches.h"
......@@ -34,6 +36,7 @@
#include "content/public/browser/tracing_controller.h"
#include "content/public/test/browser_test_utils.h"
#include "services/tracing/public/cpp/trace_event_agent.h"
#include "testing/perf/luci_test_result.h"
#include "ui/aura/window_tracker.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/display/display_switches.h"
......@@ -56,15 +59,29 @@ float GetHistogramMean(const std::string& name) {
return static_cast<float>(samples->sum()) / samples->TotalCount();
}
// Builds a LuciTestResult for the currently running gtest, attaching
// |trace_file| as the trace artifact and one "tbmv2" tag per timeline-based
// metric that should be computed from it.
perf_test::LuciTestResult CreateTestResult(
    const base::FilePath& trace_file,
    const std::vector<std::string>& tbm_metrics) {
  auto result = perf_test::LuciTestResult::CreateForGTest();
  result.AddOutputArtifactFile("trace/1", trace_file, "application/json");
  for (const std::string& metric : tbm_metrics) {
    result.AddTag("tbmv2", metric);
  }
  return result;
}
} // namespace
class AshContentTest::Tracer {
public:
Tracer(base::FilePath trace_dir,
std::string tracing_categories,
std::vector<std::string> histograms)
std::vector<std::string> histograms,
std::vector<std::string> tbm_metrics)
: trace_dir_(std::move(trace_dir)),
tracing_categories_(std::move(tracing_categories)) {
tracing_categories_(std::move(tracing_categories)),
tbm_metrics_(std::move(tbm_metrics)) {
auto* controller = content::TracingController::GetInstance();
base::trace_event::TraceConfig config(
tracing_categories_, base::trace_event::RECORD_CONTINUOUSLY);
......@@ -79,16 +96,9 @@ class AshContentTest::Tracer {
}
~Tracer() {
{
// TODO(oshima): Figure out how interactive_ui_tests allows IO operation
// in teardown.
base::RunLoop runloop;
base::PostTaskAndReply(
FROM_HERE, {base::ThreadPool(), base::MayBlock()},
base::BindOnce(&Tracer::CreateTmp, base::Unretained(this)),
runloop.QuitClosure());
runloop.Run();
}
base::ScopedAllowBlockingForTesting allow_io;
CreateTmp();
{
base::RunLoop runloop;
auto trace_data_endpoint = content::TracingController::CreateFileEndpoint(
......@@ -98,6 +108,10 @@ class AshContentTest::Tracer {
runloop.Run();
CHECK(result);
}
base::FilePath report_file =
trace_file_.AddExtension(FILE_PATH_LITERAL("test_result.json"));
CreateTestResult(trace_file_, tbm_metrics_).WriteToFile(report_file);
}
void CreateTmp() {
......@@ -107,6 +121,7 @@ class AshContentTest::Tracer {
base::FilePath trace_dir_;
base::FilePath trace_file_;
std::string tracing_categories_;
std::vector<std::string> tbm_metrics_;
};
AshContentTest::AshContentTest()
......@@ -144,7 +159,7 @@ void AshContentTest::SetUpOnMainThread() {
std::move(dir),
std::move(
"benchmark,cc,viz,input,latency,gpu,rail,toplevel,ui,views,viz"),
GetUMAHistogramNames());
GetUMAHistogramNames(), GetTimelineBasedMetrics());
}
gfx::Size display_size = ash::Shell::GetPrimaryRootWindow()->bounds().size();
test_window_size_.set_height(
......@@ -154,8 +169,8 @@ void AshContentTest::SetUpOnMainThread() {
void AshContentTest::TearDownOnMainThread() {
tracer_.reset();
bool print =
base::CommandLine::ForCurrentProcess()->HasSwitch(kPerfTestPrintUmaMeans);
auto* command_line = base::CommandLine::ForCurrentProcess();
const bool print = command_line->HasSwitch(kPerfTestPrintUmaMeans);
LOG_IF(INFO, print) << "=== Histogram Means ===";
for (auto name : GetUMAHistogramNames()) {
EXPECT_TRUE(!!base::StatisticsRecorder::FindHistogram(name))
......@@ -210,5 +225,9 @@ void AshContentTest::PostRunTestOnMainThread() {
}
std::vector<std::string> AshContentTest::GetUMAHistogramNames() const {
return std::vector<std::string>();
return {};
}
std::vector<std::string> AshContentTest::GetTimelineBasedMetrics() const {
  // Default TBMv2 metrics extracted from ash perf test traces.
  return std::vector<std::string>{"renderingMetric", "umaMetric"};
}
......@@ -6,6 +6,8 @@
#define ASH_SHELL_CONTENT_TEST_ASH_CONTENT_TEST_H_
#include <memory>
#include <string>
#include <vector>
#include "base/macros.h"
#include "content/public/test/content_browser_test.h"
......@@ -30,6 +32,13 @@ class AshContentTest : public content::ContentBrowserTest {
virtual std::vector<std::string> GetUMAHistogramNames() const;
// Returns the names of timeline based metrics (TBM) to be extracted from
// the generated trace. The metrics must be defined in telemetry
// third_party/catapult/tracing/tracing/metrics/
// so that third_party/catapult/tracing/bin/run_metric could handle them.
// Default is "renderingMetric", "umaMetric".
virtual std::vector<std::string> GetTimelineBasedMetrics() const;
private:
class Tracer;
......
......@@ -3517,6 +3517,7 @@ test("unit_tests") {
"//skia",
"//testing/gmock",
"//testing/gtest",
"//testing/perf:unit_tests",
"//third_party/cacheinvalidation",
"//third_party/icu",
"//third_party/leveldatabase",
......
......@@ -12,6 +12,7 @@
#include "chrome/test/base/test_switches.h"
#include "content/public/browser/tracing_controller.h"
#include "services/tracing/public/cpp/trace_event_agent.h"
#include "testing/perf/luci_test_result.h"
#include "ui/compositor/compositor_switches.h"
#include "ui/gl/gl_switches.h"
......@@ -25,10 +26,10 @@
#include "ui/gfx/image/image_skia.h"
#endif // OS_CHROMEOS
static const char kTraceDir[] = "trace-dir";
namespace {
constexpr char kTraceDir[] = "trace-dir";
#if defined(OS_CHROMEOS)
// Watches if the wallpaper has been changed and runs a passed callback if so.
class TestWallpaperObserver : public ash::WallpaperControllerObserver {
......@@ -73,6 +74,18 @@ void CreateAndSetWallpaper() {
}
#endif // OS_CHROMEOS
// Creates a LuciTestResult for the current gtest that references
// |trace_file| as its output artifact and carries a "tbmv2" tag for each
// requested timeline-based metric.
perf_test::LuciTestResult CreateTestResult(
    const base::FilePath& trace_file,
    const std::vector<std::string>& tbm_metrics) {
  perf_test::LuciTestResult result =
      perf_test::LuciTestResult::CreateForGTest();
  result.AddOutputArtifactFile("trace/1", trace_file, "application/json");
  for (const auto& metric : tbm_metrics)
    result.AddTag("tbmv2", metric);
  return result;
}
} // namespace
////////////////////////////////////////////////////////////////////////////////
......@@ -100,6 +113,10 @@ const std::string PerformanceTest::GetTracingCategories() const {
return std::string();
}
std::vector<std::string> PerformanceTest::GetTimelineBasedMetrics() const {
  // By default no timeline-based metrics are requested; subclasses override.
  return std::vector<std::string>();
}
void PerformanceTest::SetUpOnMainThread() {
setup_called_ = true;
InProcessBrowserTest::SetUpOnMainThread();
......@@ -119,14 +136,15 @@ void PerformanceTest::SetUpOnMainThread() {
}
void PerformanceTest::TearDownOnMainThread() {
auto* command_line = base::CommandLine::ForCurrentProcess();
if (should_start_trace_) {
auto* controller = content::TracingController::GetInstance();
ASSERT_TRUE(controller->IsTracing())
<< "Did you forget to call PerformanceTest::SetUpOnMainThread?";
base::RunLoop runloop;
base::FilePath dir =
base::CommandLine::ForCurrentProcess()->GetSwitchValuePath(kTraceDir);
base::FilePath dir = command_line->GetSwitchValuePath(kTraceDir);
base::FilePath trace_file;
CHECK(base::CreateTemporaryFileInDir(dir, &trace_file));
LOG(INFO) << "Created the trace file: " << trace_file;
......@@ -136,9 +154,13 @@ void PerformanceTest::TearDownOnMainThread() {
bool result = controller->StopTracing(trace_data_endpoint);
runloop.Run();
CHECK(result);
base::FilePath report_file =
trace_file.AddExtension(FILE_PATH_LITERAL("test_result.json"));
CreateTestResult(trace_file, GetTimelineBasedMetrics())
.WriteToFile(report_file);
}
bool print = base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kPerfTestPrintUmaMeans);
bool print = command_line->HasSwitch(switches::kPerfTestPrintUmaMeans);
LOG_IF(INFO, print) << "=== Histogram Means ===";
for (auto name : GetUMAHistogramNames()) {
EXPECT_TRUE(HasHistogram(name)) << "missing histogram:" << name;
......@@ -177,3 +199,7 @@ void UIPerformanceTest::SetUpOnMainThread() {
const std::string UIPerformanceTest::GetTracingCategories() const {
return "benchmark,cc,viz,input,latency,gpu,rail,toplevel,ui,views,viz";
}
std::vector<std::string> UIPerformanceTest::GetTimelineBasedMetrics() const {
  // UI perf tests extract rendering and UMA metrics from the trace.
  return std::vector<std::string>{"renderingMetric", "umaMetric"};
}
......@@ -5,6 +5,7 @@
#ifndef CHROME_TEST_BASE_PERF_PERFORMANCE_TEST_H_
#define CHROME_TEST_BASE_PERF_PERFORMANCE_TEST_H_
#include "base/time/time.h"
#include "chrome/test/base/in_process_browser_test.h"
// PerformanceTest is an interactive-ui-test that can be used to collect traces.
......@@ -21,6 +22,11 @@ class PerformanceTest : public InProcessBrowserTest {
virtual std::vector<std::string> GetUMAHistogramNames() const;
virtual const std::string GetTracingCategories() const;
// Returns the names of timeline based metrics (TBM) to be extracted from
// the generated trace. The metrics must be defined in telemetry
// third_party/catapult/tracing/tracing/metrics/
// so that third_party/catapult/tracing/bin/run_metric could handle them.
virtual std::vector<std::string> GetTimelineBasedMetrics() const;
// InProcessBrowserTest:
void SetUpOnMainThread() override;
......@@ -52,6 +58,8 @@ class UIPerformanceTest : public PerformanceTest {
void SetUpOnMainThread() override;
const std::string GetTracingCategories() const override;
// Default is "renderingMetric", "umaMetric".
std::vector<std::string> GetTimelineBasedMetrics() const override;
private:
DISALLOW_COPY_AND_ASSIGN(UIPerformanceTest);
......
......@@ -5,6 +5,8 @@
source_set("perf") {
testonly = true
sources = [
"luci_test_result.cc",
"luci_test_result.h",
"perf_result_reporter.cc",
"perf_result_reporter.h",
"perf_test.cc",
......@@ -12,5 +14,18 @@ source_set("perf") {
]
deps = [
"//base",
"//testing/gtest",
]
}
# Unit tests for the helpers in this directory (currently LuciTestResult).
# Linked into //chrome/test:unit_tests.
source_set("unit_tests") {
  testonly = true
  sources = [
    "luci_test_result_unittest.cc",
  ]
  deps = [
    ":perf",
    "//base",
    "//testing/gtest",
  ]
}
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "testing/perf/luci_test_result.h"
#include <utility>
#include "base/files/file_util.h"
#include "base/json/json_writer.h"
#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/values.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace perf_test {
namespace {
constexpr char kKeyFilePath[] = "filePath";
constexpr char kKeyContents[] = "contents";
constexpr char kKeyContentType[] = "contentType";
constexpr char kKeyTestResult[] = "testResult";
constexpr char kKeyTestPath[] = "testPath";
constexpr char kKeyVariant[] = "variant";
constexpr char kKeyStatus[] = "status";
constexpr char kKeyIsExpected[] = "isExpected";
constexpr char kKeyStartTime[] = "startTime";
constexpr char kKeyRunDuration[] = "runDuration";
constexpr char kKeyOutputArtifacts[] = "outputArtifacts";
constexpr char kKeyTags[] = "tags";
constexpr char kKeyKey[] = "key";
constexpr char kKeyValue[] = "value";
// Returns iso timeformat string of |time| in UTC.
// Formats |time| as an ISO-8601 UTC timestamp with millisecond precision,
// e.g. "2019-09-11T12:30:00.000Z".
std::string ToUtcIsoTime(base::Time time) {
  base::Time::Exploded exploded;
  time.UTCExplode(&exploded);
  return base::StringPrintf("%d-%02d-%02dT%02d:%02d:%02d.%03dZ",
                            exploded.year, exploded.month,
                            exploded.day_of_month, exploded.hour,
                            exploded.minute, exploded.second,
                            exploded.millisecond);
}
// Maps a LuciTestResult::Status to the string used in the LUCI Test Results
// json ("PASS", "FAIL", ...).
std::string ToString(LuciTestResult::Status status) {
  using Status = LuciTestResult::Status;
  if (status == Status::kPass)
    return "PASS";
  if (status == Status::kFail)
    return "FAIL";
  if (status == Status::kCrash)
    return "CRASH";
  if (status == Status::kAbort)
    return "ABORT";
  if (status == Status::kSkip)
    return "SKIP";
  DCHECK(status == Status::kUnspecified);
  return "UNSPECIFIED";
}
// Serializes |artifact| into a dict carrying "contentType" plus exactly one
// of "filePath" (file-backed artifact) or "contents" (inline artifact).
base::Value ToValue(const LuciTestResult::Artifact& artifact) {
  // Exactly one of the two optional payload fields must be set.
  DCHECK(artifact.file_path.has_value() != artifact.contents.has_value());
  base::Value dict(base::Value::Type::DICTIONARY);
  if (artifact.file_path.has_value())
    dict.SetStringKey(kKeyFilePath, artifact.file_path->AsUTF8Unsafe());
  else
    dict.SetStringKey(kKeyContents, artifact.contents.value());
  dict.SetStringKey(kKeyContentType, artifact.content_type);
  return dict;
}
// Serializes |result| into the {"testResult": {...}} dict defined by the
// LUCI Test Results format. Unset/empty optional fields are omitted.
base::Value ToValue(const LuciTestResult& result) {
  base::Value report(base::Value::Type::DICTIONARY);
  base::Value* entry = report.SetKey(
      kKeyTestResult, base::Value(base::Value::Type::DICTIONARY));
  entry->SetStringKey(kKeyTestPath, result.test_path());
  // "variant" is only emitted when at least one pair was recorded.
  if (!result.extra_variant_pairs().empty()) {
    base::Value* variants = entry->SetKey(
        kKeyVariant, base::Value(base::Value::Type::DICTIONARY));
    for (const auto& kv : result.extra_variant_pairs())
      variants->SetStringKey(kv.first, kv.second);
  }
  entry->SetStringKey(kKeyStatus, ToString(result.status()));
  entry->SetBoolKey(kKeyIsExpected, result.is_expected());
  // A null start time / zero duration means "not recorded"; skip them.
  if (!result.start_time().is_null())
    entry->SetStringKey(kKeyStartTime, ToUtcIsoTime(result.start_time()));
  if (!result.duration().is_zero()) {
    entry->SetStringKey(
        kKeyRunDuration,
        base::StringPrintf("%.2fs", result.duration().InSecondsF()));
  }
  if (!result.output_artifacts().empty()) {
    base::Value* artifacts = entry->SetKey(
        kKeyOutputArtifacts, base::Value(base::Value::Type::DICTIONARY));
    for (const auto& kv : result.output_artifacts())
      artifacts->SetKey(kv.first, ToValue(kv.second));
  }
  if (!result.tags().empty()) {
    base::Value* tag_list =
        entry->SetKey(kKeyTags, base::Value(base::Value::Type::LIST));
    for (const LuciTestResult::Tag& tag : result.tags()) {
      base::Value tag_value(base::Value::Type::DICTIONARY);
      tag_value.SetStringKey(kKeyKey, tag.key);
      tag_value.SetStringKey(kKeyValue, tag.value);
      tag_list->Append(std::move(tag_value));
    }
  }
  return report;
}
// Serializes |result| to a JSON string. Writing a well-formed base::Value
// tree is expected to always succeed.
std::string ToJson(const LuciTestResult& result) {
  std::string serialized;
  CHECK(base::JSONWriter::Write(ToValue(result), &serialized));
  return serialized;
}
} // namespace
///////////////////////////////////////////////////////////////////////////////
// LuciTestResult::Artifact
// Default-constructs an empty artifact: no payload, empty content type.
LuciTestResult::Artifact::Artifact() = default;

LuciTestResult::Artifact::Artifact(const Artifact& other) = default;

// File-backed artifact. NOTE(review): |file_path| is taken by const value;
// presumably a const reference was intended — fix together with the
// declaration in luci_test_result.h.
LuciTestResult::Artifact::Artifact(const base::FilePath file_path,
                                   const std::string& content_type)
    : file_path(file_path), content_type(content_type) {}

// Inline-contents artifact.
LuciTestResult::Artifact::Artifact(const std::string& contents,
                                   const std::string& content_type)
    : contents(contents), content_type(content_type) {}

LuciTestResult::Artifact::~Artifact() = default;
///////////////////////////////////////////////////////////////////////////////
// LuciTestResult
// Special members are defaulted out of line; all data members are value
// types (strings, flat_maps, Time), so the defaults are correct.
LuciTestResult::LuciTestResult() = default;
LuciTestResult::LuciTestResult(const LuciTestResult& other) = default;
LuciTestResult::LuciTestResult(LuciTestResult&& other) = default;
LuciTestResult::~LuciTestResult() = default;
// static
// static
// Fills a LuciTestResult from gtest's info about the currently running test:
// test path, param variants, pass/fail status and (when available) timing.
LuciTestResult LuciTestResult::CreateForGTest() {
  const testing::TestInfo* const test_info =
      testing::UnitTest::GetInstance()->current_test_info();

  // A parameterized test is named "CaseName/ParamIndex"; split off the
  // index after the last '/' so the test path stays annotation-free.
  std::string case_name = test_info->name();
  std::string param_index;
  const std::string::size_type slash = case_name.rfind('/');
  if (slash != std::string::npos) {
    param_index = case_name.substr(slash + 1);
    case_name = case_name.substr(0, slash);
  }

  LuciTestResult result;
  result.set_test_path(base::StringPrintf("%s.%s", test_info->test_suite_name(),
                                          case_name.c_str()));
  if (test_info->type_param())
    result.AddVariant("param/instantiation", test_info->type_param());
  if (!param_index.empty())
    result.AddVariant("param/index", param_index);

  const bool passed = test_info->result()->Passed();
  result.set_status(passed ? Status::kPass : Status::kFail);
  // Assumes that the expectation is test passing.
  result.set_is_expected(passed);

  // The start timestamp is not set before the test run finishes, e.g. when
  // called from PerformanceTest::TearDownOnMainThread; leave timing unset
  // in that case.
  const testing::TestResult* test_result = test_info->result();
  if (test_result->start_timestamp()) {
    result.set_start_time(base::Time::FromTimeT(
        static_cast<time_t>(test_result->start_timestamp() / 1000)));
    result.set_duration(
        base::TimeDelta::FromMilliseconds(test_result->elapsed_time()));
  }
  return result;
}
// Records one variant key/value pair. Each key may only be added once.
void LuciTestResult::AddVariant(const std::string& key,
                                const std::string& value) {
  const bool inserted = extra_variant_pairs_.insert({key, value}).second;
  DCHECK(inserted);
}
// Registers a file-backed output artifact. Artifact names must be unique.
void LuciTestResult::AddOutputArtifactFile(const std::string& artifact_name,
                                           const base::FilePath& file_path,
                                           const std::string& content_type) {
  const bool inserted =
      output_artifacts_
          .insert(std::make_pair(artifact_name,
                                 Artifact(file_path, content_type)))
          .second;
  DCHECK(inserted);
}
// Registers an inline (in-memory) output artifact. Artifact names must be
// unique.
void LuciTestResult::AddOutputArtifactContents(
    const std::string& artifact_name,
    const std::string& contents,
    const std::string& content_type) {
  const bool inserted =
      output_artifacts_
          .insert(std::make_pair(artifact_name,
                                 Artifact(contents, content_type)))
          .second;
  DCHECK(inserted);
}
void LuciTestResult::AddTag(const std::string& key, const std::string& value) {
tags_.emplace_back(Tag{key, value});
}
// Serializes this result to JSON and writes it to |result_file|, CHECKing
// that the full payload was written.
void LuciTestResult::WriteToFile(const base::FilePath& result_file) const {
  const std::string json = ToJson(*this);
  // base::WriteFile takes/returns int; convert explicitly rather than
  // relying on an implicit size_t -> int narrowing, and use CHECK_EQ so a
  // failure logs both the expected and the actual byte count.
  const int json_size = static_cast<int>(json.size());
  CHECK_EQ(json_size, base::WriteFile(result_file, json.data(), json_size));
}
} // namespace perf_test
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TESTING_PERF_LUCI_TEST_RESULT_H_
#define TESTING_PERF_LUCI_TEST_RESULT_H_
#include <string>
#include <vector>
#include "base/containers/flat_map.h"
#include "base/files/file_path.h"
#include "base/macros.h"
#include "base/optional.h"
#include "base/time/time.h"
namespace perf_test {
// Generates TestResultEntry dict in LUCI Test Results format.
// See: go/luci-test-results-design
// //infra/go/src/go.chromium.org/luci/results/proto/v1/test_result.proto
class LuciTestResult {
 public:
  // Represents a test result status.
  enum class Status {
    // The test status is unspecified.
    kUnspecified,
    // The test has passed.
    kPass,
    // The test has failed.
    kFail,
    // The test did not complete because it crashed.
    kCrash,
    // The test did not complete because it was interrupted, e.g. timeout.
    kAbort,
    // The test or test framework decided not to run the test, or the test was
    // not run due to previous tests timing out.
    kSkip
  };

  // Represents an output artifact: either a file on the test machine or
  // inline contents, never both.
  struct Artifact {
    Artifact();
    Artifact(const Artifact& other);
    // TODO(review): |file_path| is passed by const value; change to a const
    // reference together with the definition in luci_test_result.cc.
    Artifact(const base::FilePath file_path, const std::string& content_type);
    Artifact(const std::string& contents, const std::string& content_type);
    ~Artifact();

    // Use only one of the two fields below.
    // Absolute path on the same machine running the test.
    base::Optional<base::FilePath> file_path;
    // The data of the artifact.
    base::Optional<std::string> contents;

    std::string content_type;
  };

  // Represents a tag, a key-value pair attached to the result.
  struct Tag {
    std::string key;
    std::string value;
  };

  LuciTestResult();
  LuciTestResult(const LuciTestResult& other);
  LuciTestResult(LuciTestResult&& other);
  ~LuciTestResult();

  // Assignment mirrors the constructors above. Without these declarations
  // the user-declared move constructor would implicitly delete copy
  // assignment and suppress move assignment.
  LuciTestResult& operator=(const LuciTestResult& other) = default;
  LuciTestResult& operator=(LuciTestResult&& other) = default;

  // Helper to create a LuciTestResult and fill in info for the current gtest.
  static LuciTestResult CreateForGTest();

  // Adds a variant key-value pair to |extra_variant_pairs_|. See VariantDef in
  // //infra/go/src/go.chromium.org/luci/resultdb/proto/v1/common.proto
  // for more details. Each key may only be added once.
  void AddVariant(const std::string& key, const std::string& value);

  // Adds an output artifact. Each |artifact_name| may only be added once.
  void AddOutputArtifactFile(const std::string& artifact_name,
                             const base::FilePath& file_path,
                             const std::string& content_type);
  void AddOutputArtifactContents(const std::string& artifact_name,
                                 const std::string& contents,
                                 const std::string& content_type);

  // Adds a tag. Duplicate keys are allowed.
  void AddTag(const std::string& key, const std::string& value);

  // Serializes this result as JSON and writes it to |result_file|.
  void WriteToFile(const base::FilePath& result_file) const;

  // Getters and setters.
  const std::string& test_path() const { return test_path_; }
  void set_test_path(const std::string& test_path) { test_path_ = test_path; }

  const base::flat_map<std::string, std::string>& extra_variant_pairs() const {
    return extra_variant_pairs_;
  }

  Status status() const { return status_; }
  void set_status(Status status) { status_ = status; }

  bool is_expected() const { return is_expected_; }
  void set_is_expected(bool is_expected) { is_expected_ = is_expected; }

  base::Time start_time() const { return start_time_; }
  void set_start_time(base::Time start_time) { start_time_ = start_time; }

  base::TimeDelta duration() const { return duration_; }
  void set_duration(base::TimeDelta duration) { duration_ = duration; }

  const base::flat_map<std::string, Artifact>& output_artifacts() const {
    return output_artifacts_;
  }

  const std::vector<Tag>& tags() const { return tags_; }

 private:
  // For gtest, |test_path_| is <test_suite_name>.<test_case_name>, without
  // the param annotations. E.g. "InstantiationName/SuiteName.CaseName/0"
  // will have "/0" stripped and be just "InstantiationName/SuiteName.CaseName".
  std::string test_path_;
  // For gtest, |extra_variant_pairs_| holds info about the type param and
  // value param for typed/parameterized tests.
  base::flat_map<std::string, std::string> extra_variant_pairs_;
  // Status of the test result.
  Status status_ = Status::kUnspecified;
  // Whether |status| is expected.
  bool is_expected_ = false;
  // Test start time.
  base::Time start_time_;
  // Duration of the test.
  base::TimeDelta duration_;
  // Artifacts of the test run.
  base::flat_map<std::string, Artifact> output_artifacts_;
  // Tags of the test run.
  std::vector<Tag> tags_;
};
} // namespace perf_test
#endif // TESTING_PERF_LUCI_TEST_RESULT_H_
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "testing/perf/luci_test_result.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/json/json_reader.h"
#include "base/optional.h"
#include "base/strings/stringprintf.h"
#include "base/time/time.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace perf_test {
class LuciTestResultTest : public testing::Test {
public:
LuciTestResultTest() = default;
~LuciTestResultTest() override = default;
// testing::Test:
void SetUp() override {
testing::Test::SetUp();
ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
}
base::FilePath GetResultFilePath() const {
return temp_dir_.GetPath().AppendASCII("luci_test_results.json");
}
// Validates that |result| is written to file that contains an equivalent JSON
// as |expected_json|.
void ValidateResult(const LuciTestResult& result,
const std::string& expected_json) {
const base::FilePath result_file = GetResultFilePath();
result.WriteToFile(result_file);
std::string json;
ASSERT_TRUE(ReadFileToString(GetResultFilePath(), &json));
base::Optional<base::Value> value = base::JSONReader::Read(json);
ASSERT_TRUE(value.has_value());
base::Optional<base::Value> expected_value =
base::JSONReader::Read(expected_json);
ASSERT_TRUE(expected_value.has_value());
EXPECT_EQ(expected_value, value) << "Expected:\n====\n"
<< expected_json << "\nActual:\n====\n"
<< json;
}
private:
base::ScopedTempDir temp_dir_;
DISALLOW_COPY_AND_ASSIGN(LuciTestResultTest);
};
// Verifies that a fully-populated result serializes every supported field:
// test path, status, variants, timing, inline and file artifacts, and tags.
TEST_F(LuciTestResultTest, Basic) {
  LuciTestResult result;
  result.set_test_path("FakeTestSuite.FakeTest");
  result.set_status(LuciTestResult::Status::kPass);
  result.set_is_expected(true);
  result.AddVariant("variantKey", "variantValue");
  result.AddVariant("param/instantiation", "FooType");
  result.AddVariant("param/index", "0");
  // 2019/9/11 12:30 UTC
  // (Exploded field order: year, month, day-of-week, day, hour, minute,
  //  second; 2019-09-11 was a Wednesday, hence the 3.)
  base::Time start_time;
  ASSERT_TRUE(
      base::Time::FromUTCExploded({2019, 9, 3, 11, 12, 30, 0}, &start_time));
  result.set_start_time(start_time);
  result.set_duration(base::TimeDelta::FromMilliseconds(1500));
  // Two inline artifacts ("contents") and one file-backed one ("filePath").
  result.AddOutputArtifactContents("plain", "plain data", "text/plain");
  result.AddOutputArtifactContents("new_line", "first\nsecond", "text/plain");
  result.AddOutputArtifactFile(
      "file.json", base::FilePath(FILE_PATH_LITERAL("/tmp/file.json")),
      "application/json");
  result.AddTag("tbmv2", "umaMetric");
  const std::string expected_json =
      R"({
"testResult":{
"outputArtifacts":{
"file.json":{
"contentType":"application/json",
"filePath":"/tmp/file.json"
},
"new_line":{
"contentType":"text/plain",
"contents":"first\nsecond"
},
"plain":{
"contentType":"text/plain",
"contents":"plain data"
}
},
"isExpected":true,
"runDuration":"1.50s",
"startTime":"2019-09-11T12:30:00.000Z",
"status":"PASS",
"tags":[
{"key":"tbmv2","value":"umaMetric"}
],
"variant":{
"variantKey": "variantValue",
"param/instantiation": "FooType",
"param/index": "0"
},
"testPath":"FakeTestSuite.FakeTest"
}
})";
  ValidateResult(result, expected_json);
}
// Verifies that every Status enumerator maps to its expected wire string.
TEST_F(LuciTestResultTest, Status) {
  using Status = LuciTestResult::Status;

  LuciTestResult result;
  result.set_test_path("FakeTestSuite.Status");

  const std::string json_template =
      R"({
"testResult":{
"isExpected":false,
"status":"%s",
"testPath":"FakeTestSuite.Status"
}
})";

  struct StatusCase {
    Status status;
    const char* text;
  };
  const StatusCase kCases[] = {
      {Status::kUnspecified, "UNSPECIFIED"},
      {Status::kPass, "PASS"},
      {Status::kFail, "FAIL"},
      {Status::kCrash, "CRASH"},
      {Status::kAbort, "ABORT"},
      {Status::kSkip, "SKIP"},
  };
  for (const StatusCase& status_case : kCases) {
    result.set_status(status_case.status);
    ValidateResult(result, base::StringPrintf(json_template.c_str(),
                                              status_case.text));
  }
}
///////////////////////////////////////////////////////////////////////////////
// Fixture for verifying CreateForGTest() on a value-parameterized test: the
// param index after the trailing "/" in the gtest name must be reported as
// the "param/index" variant.
class LuciTestResultParameterizedTest
    : public LuciTestResultTest,
      public testing::WithParamInterface<int> {
 public:
  LuciTestResultParameterizedTest() = default;
  ~LuciTestResultParameterizedTest() override = default;
};

TEST_P(LuciTestResultParameterizedTest, Variant) {
  LuciTestResult result = LuciTestResult::CreateForGTest();
  const std::string json_template =
      R"({
"testResult":{
"isExpected":true,
"status":"PASS",
"testPath":
"ZeroToFiveSequence/LuciTestResultParameterizedTest.Variant",
"variant":{"param/index":"%d"}
}
})";
  // GetParam() doubles as the gtest param index because Range(0, 5) yields
  // the sequence 0..4.
  const std::string expected_json =
      base::StringPrintf(json_template.c_str(), GetParam());
  ValidateResult(result, expected_json);
}

INSTANTIATE_TEST_SUITE_P(ZeroToFiveSequence,
                         LuciTestResultParameterizedTest,
                         testing::Range(0, 5));
///////////////////////////////////////////////////////////////////////////////
// Fixture for verifying CreateForGTest() on typed tests: the type name after
// the trailing "/" in the suite name must be reported as the
// "param/instantiation" variant.
template <typename T>
class LuciTestResultTypedTest : public LuciTestResultTest {
 public:
  LuciTestResultTypedTest() = default;
  ~LuciTestResultTypedTest() override = default;
};

TYPED_TEST_SUITE_P(LuciTestResultTypedTest);

TYPED_TEST_P(LuciTestResultTypedTest, Variant) {
  LuciTestResult result = LuciTestResult::CreateForGTest();
  // Derive the instantiation suffix from the runtime suite name so the
  // expectation matches whatever gtest generated.
  std::string test_suite_name =
      testing::UnitTest::GetInstance()->current_test_info()->test_suite_name();
  auto pos = test_suite_name.rfind('/');
  ASSERT_NE(pos, std::string::npos);
  std::string type_param_name = test_suite_name.substr(pos + 1);
  const std::string json_template =
      R"({
"testResult":{
"isExpected":true,
"status":"PASS",
"testPath":"LuciTestResultTypedTest/%s.Variant",
"variant":{"param/instantiation":"%s"}
}
})";
  // Note that chromium has RTTI disabled. As a result, type_param() and
  // GetTypeName<> always returns a generic "<type>".
  const std::string expected_json =
      base::StringPrintf(json_template.c_str(), type_param_name.c_str(),
                         testing::internal::GetTypeName<TypeParam>().c_str());
  this->ValidateResult(result, expected_json);
}

REGISTER_TYPED_TEST_SUITE_P(LuciTestResultTypedTest, Variant);

using SomeTypes = testing::Types<int, double>;
INSTANTIATE_TYPED_TEST_SUITE_P(, LuciTestResultTypedTest, SomeTypes);
} // namespace perf_test
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment