Commit a94eef13 authored by Yuri Wiitala, committed by Commit Bot

Move tab mirroring performance tests into browser_tests.

This moves TabCapturePerformanceTest and CastV2PerformanceTest into
browser_tests. By default, they have been changed to run in a quick-
check mode. When run for performance testing on the perf bots, the
--full-performance-run command-line switch must be passed.
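
For illustration only (not part of this CL): a minimal standalone C++ sketch
of the mode selection described above. The switch string matches the
kFullPerformanceRunSwitch constant added in this change, and the 15 s / 4 s
durations mirror kFullRunObservationPeriod and kQuickRunObservationPeriod;
the argument parsing and output below are simplified stand-ins for the
fixture's SetUpCommandLine() and ContinueBrowserFor().

// Standalone sketch of the quick-check vs. full-performance mode selection.
// "--full-performance-run" matches kFullPerformanceRunSwitch in this change;
// the 15 s / 4 s values mirror kFullRunObservationPeriod and
// kQuickRunObservationPeriod.
#include <chrono>
#include <cstring>
#include <iostream>

int main(int argc, char** argv) {
  bool is_full_performance_run = false;
  for (int i = 1; i < argc; ++i) {
    if (std::strcmp(argv[i], "--full-performance-run") == 0)
      is_full_performance_run = true;
  }
  const std::chrono::seconds observation_period =
      is_full_performance_run ? std::chrono::seconds(15)  // perf bots
                              : std::chrono::seconds(4);  // CQ quick check
  std::cout << "Tracing and observing for " << observation_period.count()
            << " sec\n";
  return 0;
}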

The performance_browser_tests isolate config has been changed to run
browser_tests with the --full-performance-run switch, gtest filters,
and timeout overrides. Also, the old performance_browser_tests GN test
target has been turned into a simple group target that groups the
necessary data dependencies for running on the perf bots.

This change also allows the tests to complete in the absence of tracing
data when in quick-check mode. While most
bots do collect a reasonable amount of tracing data in a few short
seconds, one or two don't always do so. Unfortunately, we can't justify
making the CQ run longer to force data collection in all cases.
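
For illustration only: a standalone sketch of the quick-check tolerance
described above, modeled on the have_enough_events accounting added to
TraceAndObserve() below. The event names, counts, and the choice to fail
only a full performance run are assumptions for illustration, not code
from this CL.

// Standalone sketch: tolerate sparse trace data in quick-check mode, but
// treat it as a problem when a full performance run needs the metrics.
// Event names/counts and the failure policy are illustrative assumptions.
#include <iostream>
#include <map>
#include <string>

int main() {
  const bool is_full_performance_run = false;  // quick-check (CQ) mode
  const int required_event_count = 1;
  const std::map<std::string, int> event_counts = {{"Capture", 0},
                                                   {"Playout", 3}};
  bool have_enough_events = true;
  for (const auto& entry : event_counts) {
    std::cout << "Collected " << entry.second << " events ("
              << required_event_count << " required) for: " << entry.first
              << "\n";
    if (entry.second < required_event_count)
      have_enough_events = false;
  }
  if (!have_enough_events) {
    std::cerr << "Insufficient data collected.\n";
    // Quick-check mode completes anyway; only a full performance run would
    // treat missing data as a hard failure.
    return is_full_performance_run ? 1 : 0;
  }
  return 0;
}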

Bug: 804191
Change-Id: I2de6c0b94453981fb0d9015f6b3f0e42f57cc2a6
Reviewed-on: https://chromium-review.googlesource.com/c/1428399
Commit-Queue: Yuri Wiitala <miu@chromium.org>
Reviewed-by: Jochen Eisinger <jochen@chromium.org>
Reviewed-by: Caleb Rouleau <crouleau@chromium.org>
Cr-Commit-Position: refs/heads/master@{#632366}
parent cc303a03
@@ -9,6 +9,7 @@
#include <cmath>
#include "base/base64.h"
#include "base/base_switches.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
@@ -34,6 +35,10 @@
#include "third_party/zlib/google/compression_utils.h"
#include "ui/gl/gl_switches.h"
namespace {
constexpr base::StringPiece kFullPerformanceRunSwitch = "full-performance-run";
} // namespace
TabCapturePerformanceTestBase::TabCapturePerformanceTestBase() = default;
TabCapturePerformanceTestBase::~TabCapturePerformanceTestBase() = default;
@@ -57,6 +62,14 @@ void TabCapturePerformanceTestBase::SetUpOnMainThread() {
void TabCapturePerformanceTestBase::SetUpCommandLine(
base::CommandLine* command_line) {
is_full_performance_run_ = command_line->HasSwitch(kFullPerformanceRunSwitch);
// In the spirit of the NoBestEffortTasksTests, it's important to add this
// flag to make sure best-effort tasks are not required for the success of
// these tests. In a performance test run, this also removes sources of
// variance.
command_line->AppendSwitch(switches::kDisableBestEffortTasks);
// Note: The naming "kUseGpuInTests" is very misleading. It actually means
// "don't use a software OpenGL implementation." Subclasses will either call
// UseSoftwareCompositing() to use Chrome's software compositor, or else they
@@ -130,18 +143,54 @@ base::Value TabCapturePerformanceTestBase::SendMessageToExtension(
return base::Value();
}
std::string TabCapturePerformanceTestBase::TraceAndObserve(
const std::string& category_patterns) {
LOG(INFO) << "Starting tracing and running for "
<< kObservationPeriod.InSecondsF() << " sec...";
TabCapturePerformanceTestBase::TraceAnalyzerUniquePtr
TabCapturePerformanceTestBase::TraceAndObserve(
const std::string& category_patterns,
const std::vector<base::StringPiece>& event_names,
int required_event_count) {
const base::TimeDelta observation_period = is_full_performance_run_
? kFullRunObservationPeriod
: kQuickRunObservationPeriod;
LOG(INFO) << "Starting tracing...";
{
// Wait until all child processes have ACK'ed that they are now tracing.
base::trace_event::TraceConfig trace_config(
category_patterns, base::trace_event::RECORD_CONTINUOUSLY);
base::RunLoop run_loop;
const bool did_begin_tracing = tracing::BeginTracingWithTraceConfig(
trace_config, run_loop.QuitClosure());
CHECK(did_begin_tracing);
run_loop.Run();
}
LOG(INFO) << "Running browser for " << observation_period.InSecondsF()
<< " sec...";
ContinueBrowserFor(observation_period);
LOG(INFO) << "Observation period has completed. Ending tracing...";
std::string json_events;
bool success = tracing::BeginTracing(category_patterns);
CHECK(success);
ContinueBrowserFor(kObservationPeriod);
success = tracing::EndTracing(&json_events);
const bool success = tracing::EndTracing(&json_events);
CHECK(success);
LOG(INFO) << "Observation period has completed. Ending tracing...";
return json_events;
std::unique_ptr<trace_analyzer::TraceAnalyzer> result(
trace_analyzer::TraceAnalyzer::Create(json_events));
result->AssociateAsyncBeginEndEvents();
bool have_enough_events = true;
for (const auto& event_name : event_names) {
trace_analyzer::TraceEventVector events;
QueryTraceEvents(result.get(), event_name, &events);
LOG(INFO) << "Collected " << events.size() << " events ("
<< required_event_count << " required) for: " << event_name;
if (static_cast<int>(events.size()) < required_event_count) {
have_enough_events = false;
}
}
LOG_IF(WARNING, !have_enough_events) << "Insufficient data collected.";
VLOG_IF(2, result) << "Dump of trace events (trace_events.json.gz.b64):\n"
<< MakeBase64EncodedGZippedString(json_events);
return result;
}
// static
@@ -182,6 +231,21 @@ void TabCapturePerformanceTestBase::ContinueBrowserFor(
run_loop.Run();
}
// static
void TabCapturePerformanceTestBase::QueryTraceEvents(
trace_analyzer::TraceAnalyzer* analyzer,
base::StringPiece event_name,
trace_analyzer::TraceEventVector* events) {
const trace_analyzer::Query kQuery =
trace_analyzer::Query::EventNameIs(event_name.as_string()) &&
(trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) ||
trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE));
analyzer->FindEvents(kQuery, events);
}
std::unique_ptr<net::test_server::HttpResponse>
TabCapturePerformanceTestBase::HandleRequest(
const net::test_server::HttpRequest& request) {
@@ -199,7 +263,12 @@ TabCapturePerformanceTestBase::HandleRequest(
}
// static
constexpr base::TimeDelta TabCapturePerformanceTestBase::kObservationPeriod;
constexpr base::TimeDelta
TabCapturePerformanceTestBase::kFullRunObservationPeriod;
// static
constexpr base::TimeDelta
TabCapturePerformanceTestBase::kQuickRunObservationPeriod;
// static
constexpr base::TimeDelta
......
@@ -7,8 +7,11 @@
#include <memory>
#include <string>
#include <vector>
#include "base/macros.h"
#include "base/strings/string_piece.h"
#include "base/test/trace_event_analyzer.h"
#include "chrome/test/base/in_process_browser_test.h"
namespace base {
@@ -43,6 +46,11 @@ class TabCapturePerformanceTestBase : public InProcessBrowserTest {
void SetUpOnMainThread() override;
void SetUpCommandLine(base::CommandLine* command_line) override;
// If true, run a full performance test. If false, all tests should just run a
// quick test, something appropriate for running in a CQ try run or the
// waterfall.
bool is_full_performance_run() const { return is_full_performance_run_; }
// Returns the currently-loaded extension.
const extensions::Extension* extension() const { return extension_; }
@@ -68,9 +76,12 @@ class TabCapturePerformanceTestBase : public InProcessBrowserTest {
base::Value SendMessageToExtension(const std::string& json);
// Runs the browser for a while, with tracing enabled to collect events
// matching the given |category_patterns|, then returns the JSON events string
// returned by tracing::EndTracing().
std::string TraceAndObserve(const std::string& category_patterns);
// matching the given |category_patterns|.
using TraceAnalyzerUniquePtr = std::unique_ptr<trace_analyzer::TraceAnalyzer>;
TraceAnalyzerUniquePtr TraceAndObserve(
const std::string& category_patterns,
const std::vector<base::StringPiece>& event_names,
int required_event_count);
// Returns the path ".../test/data/extensions/api_test/".
static base::FilePath GetApiTestDataDir();
@@ -82,11 +93,19 @@ class TabCapturePerformanceTestBase : public InProcessBrowserTest {
// Uses base::RunLoop to run the browser for the given |duration|.
static void ContinueBrowserFor(base::TimeDelta duration);
// Queries the |analyzer| for events having the given |event_name| whose phase
// is classified as BEGIN, INSTANT, or COMPLETE (i.e., omit END events).
static void QueryTraceEvents(trace_analyzer::TraceAnalyzer* analyzer,
base::StringPiece event_name,
trace_analyzer::TraceEventVector* events);
protected:
// After the page has loaded, this is how long the browser is run with trace
// event recording taking place.
static constexpr base::TimeDelta kObservationPeriod =
// These are how long the browser is run with trace event recording taking
// place.
static constexpr base::TimeDelta kFullRunObservationPeriod =
base::TimeDelta::FromSeconds(15);
static constexpr base::TimeDelta kQuickRunObservationPeriod =
base::TimeDelta::FromSeconds(4);
// If sending a message to the extension fails, because the extension has not
// started its message listener yet, how long before the next retry?
@@ -103,6 +122,8 @@ class TabCapturePerformanceTestBase : public InProcessBrowserTest {
static const char kExtensionId[];
private:
bool is_full_performance_run_ = false;
// Set to the test page that should be served by the next call to
// HandleRequest().
std::string test_page_to_serve_;
......
@@ -1330,6 +1330,7 @@ test("browser_tests") {
"../browser/extensions/api/braille_display_private/mock_braille_controller.h",
"../browser/extensions/api/browsing_data/browsing_data_test.cc",
"../browser/extensions/api/cast_streaming/cast_streaming_apitest.cc",
"../browser/extensions/api/cast_streaming/performance_test.cc",
"../browser/extensions/api/command_line_private/command_line_private_apitest.cc",
"../browser/extensions/api/commands/command_service_browsertest.cc",
"../browser/extensions/api/content_settings/content_settings_apitest.cc",
@@ -1397,6 +1398,9 @@ test("browser_tests") {
"../browser/extensions/api/system_indicator/system_indicator_apitest.cc",
"../browser/extensions/api/system_private/system_private_apitest.cc",
"../browser/extensions/api/tab_capture/tab_capture_apitest.cc",
"../browser/extensions/api/tab_capture/tab_capture_performance_test_base.cc",
"../browser/extensions/api/tab_capture/tab_capture_performance_test_base.h",
"../browser/extensions/api/tab_capture/tab_capture_performancetest.cc",
"../browser/extensions/api/tabs/tabs_test.cc",
"../browser/extensions/api/terminal/terminal_private_apitest.cc",
"../browser/extensions/api/test/apitest_apitest.cc",
@@ -5290,81 +5294,14 @@ test("chrome_app_unittests") {
}
}
if (!is_android && !is_chromecast) {
test("performance_browser_tests") {
sources = [
"../app/chrome_version.rc.version",
"../browser/extensions/api/cast_streaming/performance_test.cc",
"../browser/extensions/api/tab_capture/tab_capture_performance_test_base.cc",
"../browser/extensions/api/tab_capture/tab_capture_performance_test_base.h",
"../browser/extensions/api/tab_capture/tab_capture_performancetest.cc",
"../browser/extensions/chrome_extension_test_notification_observer.cc",
"../browser/extensions/chrome_extension_test_notification_observer.h",
"../browser/extensions/extension_apitest.cc",
"../browser/extensions/extension_browsertest.cc",
"../browser/extensions/updater/extension_cache_fake.cc",
"../browser/extensions/updater/extension_cache_fake.h",
"base/browser_perf_tests_main.cc",
]
defines = [ "HAS_OUT_OF_PROC_TEST_RUNNER" ]
deps = [
":test_support",
":test_support_ui",
"//base",
"//base:i18n",
"//base/test:test_support",
"//chrome:browser_tests_pak",
"//chrome:packed_resources",
"//chrome:resources",
"//chrome/app:chrome_dll_resources",
"//chrome/browser",
"//chrome/renderer",
"//components/about_handler",
"//components/autofill/content/renderer:test_support",
"//components/spellcheck:buildflags",
"//content/test:test_support",
"//extensions/buildflags",
"//media/cast:test_support",
"//testing/gmock",
"//testing/gtest",
"//testing/perf",
]
data = [
"//chrome/test/data/extensions/api_test/",
]
data_deps = [
"//testing:run_perf_test",
]
if (is_win) {
configs -= [ "//build/config/win:default_incremental_linking" ]
configs +=
[ "//build/config/win:default_large_module_incremental_linking" ]
deps += [ "//chrome/app:command_ids" ]
} else {
sources -= [ "../app/chrome_version.rc.version" ]
}
if (is_linux || is_win) {
data += [
"$root_out_dir/chrome_100_percent.pak",
"$root_out_dir/chrome_200_percent.pak",
"$root_out_dir/locales/en-US.pak",
"$root_out_dir/locales/fr.pak",
"$root_out_dir/resources.pak",
]
}
if (is_mac) {
deps += [ "//chrome:chrome_app" ]
}
# This target should not require the Chrome executable to run.
assert_no_deps = [ "//chrome" ]
}
# Everything needed to build/copy to run the performance_browser_tests isolate.
# See: //testing/buildbot/gn_isolate_map.pyl
group("performance_browser_tests") {
testonly = true
data_deps = [
":browser_tests",
"//testing:run_perf_test",
]
}
if (!is_android && !is_fuchsia) {
......
@@ -2062,16 +2062,18 @@
},
"performance_browser_tests": {
"args": [
"performance_browser_tests",
"browser_tests",
"--non-telemetry=true",
"--full-performance-run",
"--test-launcher-jobs=1",
"--test-launcher-retry-limit=0",
"--test-launcher-print-test-stdio=always",
# TODO(crbug.com/759866): Figure out why CastV2PerformanceTest/0 sometimes
# takes 15-30 seconds to start up and, once fixed, remove this workaround
# (extends the watchdog timeout to 2 minutes, normally 30 seconds).
"--test-launcher-timeout=120000",
# Allow the full performance runs to take up to 60 seconds (rather than
# the default of 30 for normal CQ browser test runs).
"--ui-test-action-timeout=60000",
"--ui-test-action-max-timeout=60000",
"--test-launcher-timeout=60000",
"--gtest_filter=TabCapturePerformanceTest.*:CastV2PerformanceTest.*",
"--test-launcher-jobs=1",
"--enable-gpu",
],
"label": "//chrome/test:performance_browser_tests",
"script": "//testing/scripts/run_performance_tests.py",
......