Commit e9e1b195 authored by Nicolás Peña Moreno, committed by Commit Bot

Reland "Reland "Add an end-to-end test for LCP""

This is a reland of 7aa36149

Original change's description:
> Reland "Add an end-to-end test for LCP"
> 
> This is a reland of 0a615aa9
> 
> Some tests run without DCHECK so we instead call
> event.GetKnownArgAsValue().
> 
> Original change's description:
> > Add an end-to-end test for LCP
> >
> > This CL adds an end-to-end test for verifying the behaviour of Largest
> > Contentful Paint. Specifically, we check that the API exposed through
> > JavaScript, UKM, UMA and TraceEvents all capture consistent measurements.
> >
> > Change-Id: I5d3c2489e747d410c7d67afe2a49ead6482a3bb5
> > Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2105395
> > Reviewed-by: Nicolás Peña Moreno <npm@chromium.org>
> > Reviewed-by: Steve Kobes <skobes@chromium.org>
> > Commit-Queue: Nicolás Peña Moreno <npm@chromium.org>
> > Cr-Commit-Position: refs/heads/master@{#760594}
> 
> Bug: 1072789
> 
> Change-Id: Ic7f6cf486ed18939a98462a458ac0bb1cc74ff93
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2159616
> Auto-Submit: Nicolás Peña Moreno <npm@chromium.org>
> Commit-Queue: Steve Kobes <skobes@chromium.org>
> Reviewed-by: Steve Kobes <skobes@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#761304}

Bug: 1072789
Change-Id: I74b2e41bdce17444139e5f05f29db76f5c103db2
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2161687
Reviewed-by: Steve Kobes <skobes@chromium.org>
Commit-Queue: Nicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#761911}
parent 5addce7c
<script src="resources/testharness.js"></script>
<script>
// Tell testharness.js to not wait for 'real' tests; we only want
// testharness.js for its assertion helpers.
setup({'output': false});
</script>
<script>
// 'AsyncBuffer' serves as a helper to buffer LCP reports asynchronously.
// 'AsyncBuffer' buffers LCP entries reported asynchronously through a
// PerformanceObserver so a consumer can await them with 'pop'.
class AsyncBuffer {
  constructor() {
    // Entries accumulated by 'push' and not yet drained by 'pop'.
    this.pending = [];
    // Resolver for the promise a blocked 'pop' call is awaiting; null when
    // nobody is waiting. Calling it unblocks the pending 'pop'.
    this.resolve_fn = null;
  }

  // Appends the given 'entries' to this AsyncBuffer and wakes any blocked
  // 'pop' call.
  push(entries) {
    if (entries.length == 0) {
      throw new Error("Must not push an empty list of entries!");
    }
    this.pending = this.pending.concat(entries);
    // If a 'pop' call is blocked waiting for items, signal it to continue.
    const resolver = this.resolve_fn;
    if (resolver != null) {
      this.resolve_fn = null;
      resolver();
    }
  }

  // Drains and returns all currently pending entries. If none are queued,
  // blocks until the next 'push' supplies some.
  async pop() {
    if (this.pending.length == 0) {
      // Block on a promise that the next call to 'push' will resolve once it
      // has queued its entries.
      await new Promise(resolve => {
        this.resolve_fn = resolve;
      });
    }
    assert_true(this.pending.length > 0);
    const drained = this.pending;
    this.pending = [];
    return drained;
  }
}
// Global buffer of LCP entries, fed by a buffered PerformanceObserver so
// that entries reported before this script ran are also captured.
const buffer = new AsyncBuffer();
const po = new PerformanceObserver(
    list => buffer.push(list.getEntries()));
po.observe({type: 'largest-contentful-paint', buffered: true});
</script>
<div id="content_div_1">
<img src="images/green-16x16.png"></img>
</div>
<div id="content_div_2">
</div>
<script>
// Waits for the next LCP report and returns the single entry it carries.
// This test case assumes each LCP entry is handled before the next could
// possibly be generated, so every pop yields exactly one entry.
const block_for_next_lcp = async () => {
  const seen_events = await buffer.pop();
  assert_equals(seen_events.length, 1);
  return seen_events[0];
};
// Adds another image that is larger than "green-16x16.png". We expect this
// operation to trigger a new LCP entry.
// Inserts an image larger than "green-16x16.png" into the page. We expect
// this operation to trigger a new LCP entry.
const add_larger_image = () => {
  const img = document.createElement("img");
  content_div_2.appendChild(img);
  img.src = "images/blue96x96.png";
  img.id = "blue_image";
};
// Removes the image added by 'add_larger_image'. We expect this operation to
// trigger a new LCP entry.
// Removes the image inserted by 'add_larger_image'. We expect this
// operation to trigger a new LCP entry.
const remove_larger_image = () => {
  const img = document.getElementById("blue_image");
  assert_not_equals(img, null);
  content_div_2.removeChild(img);
};
// Exercises the following scenario and returns the LCP measurements:
//   - initial page load with an image -> LCP fires for that image
//   - add a larger image to the page  -> LCP fires for the new image
//   - remove the larger image         -> LCP fires (again) for the first image
const run_test = async () => {
  const lcp_0 = await block_for_next_lcp();
  add_larger_image();
  const lcp_1 = await block_for_next_lcp();
  remove_larger_image();
  const lcp_2 = await block_for_next_lcp();
  // Now that the scenario has run and the measurements are collected, return
  // them in a structure that the C++ side can easily query.
  return [lcp_0, lcp_1, lcp_2].map(entry => ({
    url: entry.url,
    time: entry.startTime
  }));
};
</script>
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
# found in the LICENSE file. # found in the LICENSE file.
metric_integration_jsdeps = [ metric_integration_jsdeps = [
"//third_party/blink/web_tests/external/wpt/images/blue96x96.png",
"//third_party/blink/web_tests/external/wpt/images/green-16x16.png",
"//third_party/blink/web_tests/external/wpt/layout-instability/resources/util.js", "//third_party/blink/web_tests/external/wpt/layout-instability/resources/util.js",
"//third_party/blink/web_tests/external/wpt/resources/testharness.js", "//third_party/blink/web_tests/external/wpt/resources/testharness.js",
] ]
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/page_load_metrics/integration_tests/metric_integration_test.h"
#include "base/json/json_string_value_serializer.h"
#include "base/strings/strcat.h"
#include "base/test/trace_event_analyzer.h"
#include "chrome/test/base/ui_test_utils.h"
#include "services/metrics/public/cpp/ukm_builders.h"
using trace_analyzer::Query;
using trace_analyzer::TraceAnalyzer;
using trace_analyzer::TraceEvent;
using trace_analyzer::TraceEventVector;
using ukm::builders::PageLoad;
namespace {
void ValidateCandidate(int expected_size, const TraceEvent& event) {
std::unique_ptr<base::Value> data;
ASSERT_TRUE(event.GetArgAsValue("data", &data));
const base::Optional<int> traced_size = data->FindIntKey("size");
ASSERT_TRUE(traced_size.has_value());
EXPECT_EQ(traced_size.value(), expected_size);
const base::Optional<bool> traced_main_frame_flag =
data->FindBoolKey("isMainFrame");
ASSERT_TRUE(traced_main_frame_flag.has_value());
EXPECT_TRUE(traced_main_frame_flag.value());
}
int GetCandidateIndex(const TraceEvent& event) {
std::unique_ptr<base::Value> data = event.GetKnownArgAsValue("data");
base::Optional<int> candidate_idx = data->FindIntKey("candidateIndex");
DCHECK(candidate_idx.has_value()) << "couldn't find 'candidateIndex'";
return candidate_idx.value();
}
// Strict-weak ordering of LCP candidate trace events by candidate index,
// used to sort trace events into reporting order.
bool compare_candidate_index(const TraceEvent* lhs, const TraceEvent* rhs) {
  const int lhs_index = GetCandidateIndex(*lhs);
  const int rhs_index = GetCandidateIndex(*rhs);
  return lhs_index < rhs_index;
}
void ValidateTraceEvents(std::unique_ptr<TraceAnalyzer> analyzer) {
TraceEventVector events;
analyzer->FindEvents(Query::EventNameIs("largestContentfulPaint::Candidate"),
&events);
EXPECT_EQ(3ul, events.size());
std::sort(events.begin(), events.end(), compare_candidate_index);
// LCP_0 uses green-16x16.png, of size 16 x 16.
ValidateCandidate(16 * 16, *events[0]);
// LCP_1 uses blue96x96.png, of size 96 x 96.
ValidateCandidate(96 * 96, *events[1]);
// LCP_2 uses green-16x16.png, of size 16 x 16.
ValidateCandidate(16 * 16, *events[2]);
}
} // namespace
// End-to-end check that the Largest Contentful Paint (LCP) values reported
// through the JS API, trace events, UKM and UMA are consistent with each
// other for a page whose largest image is added and then removed.
IN_PROC_BROWSER_TEST_F(MetricIntegrationTest, LargestContentfulPaint) {
  Start();
  // Trace the "loading" category, which carries the
  // largestContentfulPaint::Candidate events validated below.
  StartTracing({"loading"});
  Load("/largest_contentful_paint.html");
  // The test harness serves files from something like http://example.com:34777
  // but the port number can vary. Extract the 'window.origin' property so we
  // can compare encountered URLs to expected values.
  const std::string window_origin =
      EvalJs(web_contents(), "window.origin").ExtractString();
  const std::string image_1_url_expected =
      base::StrCat({window_origin, "/images/green-16x16.png"});
  const std::string image_2_url_expected =
      base::StrCat({window_origin, "/images/blue96x96.png"});
  // run_test() is defined by largest_contentful_paint.html; it drives the
  // add-image / remove-image scenario and returns the three LCP reports.
  content::EvalJsResult result = EvalJs(web_contents(), "run_test()");
  EXPECT_EQ("", result.error);
  // Verify that the JS API yielded three LCP reports. Note that, as we resolve
  // https://github.com/WICG/largest-contentful-paint/issues/41, this test may
  // need to be updated to reflect new semantics.
  const auto& list = result.value.GetList();
  // Expected URLs: the first image, the larger image, then the first image
  // again once the larger one has been removed.
  const std::string expected_url[3] = {
      image_1_url_expected, image_2_url_expected, image_1_url_expected};
  base::Optional<double> lcp_timestamps[3];
  for (size_t i = 0; i < 3; i++) {
    const std::string* url = list[i].FindStringPath("url");
    EXPECT_TRUE(url);
    EXPECT_EQ(*url, expected_url[i]);
    lcp_timestamps[i] = list[i].FindDoublePath("time");
    EXPECT_TRUE(lcp_timestamps[i].has_value());
  }
  EXPECT_EQ(lcp_timestamps[0], lcp_timestamps[2])
      << "The first and last LCP reports should be for the same paint so they "
         "should have the same timestamp";
  EXPECT_LT(lcp_timestamps[0], lcp_timestamps[1])
      << "The first and second LCP reports should be for different paints so "
         "should have different timestamps";
  // Need to navigate away from the test html page to force metrics to get
  // flushed/synced.
  ui_test_utils::NavigateToURL(browser(), GURL("about:blank"));
  // Check Trace Events.
  ValidateTraceEvents(StopTracingAndAnalyze());
  // Check UKM.
  // Since UKM rounds to an integer while the JS API returns a double, we'll
  // assert that the UKM and JS values are within 1.0 of each other. Comparing
  // with strict equality could round incorrectly and introduce flakiness into
  // the test.
  ExpectUKMPageLoadMetricNear(
      PageLoad::kPaintTiming_NavigationToLargestContentfulPaintName,
      lcp_timestamps[2].value(), 1.0);
  ExpectUKMPageLoadMetricNear(
      PageLoad::kPaintTiming_NavigationToLargestContentfulPaint_MainFrameName,
      lcp_timestamps[2].value(), 1.0);
  // Check UMA.
  // Similar to UKM, rounding could introduce flakiness, so use helper to
  // compare near.
  ExpectUniqueUMAPageLoadMetricNear(
      "PageLoad.PaintTiming.NavigationToLargestContentfulPaint",
      lcp_timestamps[2].value());
  ExpectUniqueUMAPageLoadMetricNear(
      "PageLoad.PaintTiming.NavigationToLargestContentfulPaint.MainFrame",
      lcp_timestamps[2].value());
}
...@@ -137,3 +137,33 @@ void MetricIntegrationTest::ExpectUKMPageLoadMetric(StringPiece metric_name, ...@@ -137,3 +137,33 @@ void MetricIntegrationTest::ExpectUKMPageLoadMetric(StringPiece metric_name,
TestUkmRecorder::ExpectEntryMetric(kv->second.get(), metric_name, TestUkmRecorder::ExpectEntryMetric(kv->second.get(), metric_name,
expected_value); expected_value);
} }
// Asserts that exactly one PageLoad UKM entry was recorded and that its
// |metric_name| metric lies within |epsilon| of |expected_value|.
void MetricIntegrationTest::ExpectUKMPageLoadMetricNear(StringPiece metric_name,
                                                        double expected_value,
                                                        double epsilon) {
  std::map<ukm::SourceId, ukm::mojom::UkmEntryPtr> entries =
      ukm_recorder().GetMergedEntriesByName(PageLoad::kEntryName);
  EXPECT_EQ(1ul, entries.size());
  const auto* entry = entries.begin()->second.get();
  const int64_t* recorded_value =
      TestUkmRecorder::GetEntryMetric(entry, metric_name);
  EXPECT_NE(recorded_value, nullptr);
  EXPECT_NEAR(*recorded_value, expected_value, epsilon);
}
// Asserts that |metric_name| recorded exactly one UMA sample and that the
// sample lies in the bucket for |expected_value| or an adjacent bucket
// (|expected_value| +- 1), since UMA buckets are integral while the expected
// value is a double.
void MetricIntegrationTest::ExpectUniqueUMAPageLoadMetricNear(
    StringPiece metric_name,
    double expected_value) {
  // Stream the StringPiece itself rather than .data(): a StringPiece is not
  // guaranteed to be NUL-terminated, so streaming .data() can read past the
  // end of the referenced buffer.
  EXPECT_EQ(histogram_tester_->GetAllSamples(metric_name).size(), 1u)
      << "There should be one sample for " << metric_name;
  // UMA uses integer buckets so check that the value is in the bucket of
  // |expected_value| or in the bucket of |expected_value| +- 1.
  EXPECT_TRUE(
      histogram_tester_->GetBucketCount(metric_name, expected_value) == 1 ||
      histogram_tester_->GetBucketCount(metric_name, expected_value + 1.0) ==
          1 ||
      histogram_tester_->GetBucketCount(metric_name, expected_value - 1.0) == 1)
      << "The sample for " << metric_name << " is not near the expected value!";
}
...@@ -91,6 +91,15 @@ class MetricIntegrationTest : public InProcessBrowserTest { ...@@ -91,6 +91,15 @@ class MetricIntegrationTest : public InProcessBrowserTest {
void ExpectUKMPageLoadMetric(base::StringPiece metric_name, void ExpectUKMPageLoadMetric(base::StringPiece metric_name,
int64_t expected_value); int64_t expected_value);
  // Checks that the recorded PageLoad UKM entry has |metric_name| within
  // |epsilon| of |expected_value|.
  void ExpectUKMPageLoadMetricNear(base::StringPiece metric_name,
                                   double expected_value,
                                   double epsilon);
  // Checks that the UMA entry is in the bucket for |expected_value| or within
  // the bucket for |expected_value| +- 1.
  void ExpectUniqueUMAPageLoadMetricNear(base::StringPiece metric_name,
                                         double expected_value);
private: private:
static std::unique_ptr<net::test_server::HttpResponse> HandleRequest( static std::unique_ptr<net::test_server::HttpResponse> HandleRequest(
const std::string& relative_url, const std::string& relative_url,
......
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
metric_integration_sources = [ metric_integration_sources = [
"//chrome/browser/page_load_metrics/integration_tests/first_input_delay_browsertest.cc", "//chrome/browser/page_load_metrics/integration_tests/first_input_delay_browsertest.cc",
"//chrome/browser/page_load_metrics/integration_tests/largest_contentful_paint_browsertest.cc",
"//chrome/browser/page_load_metrics/integration_tests/layout_instability_browsertest.cc", "//chrome/browser/page_load_metrics/integration_tests/layout_instability_browsertest.cc",
"//chrome/browser/page_load_metrics/integration_tests/metric_integration_test.cc", "//chrome/browser/page_load_metrics/integration_tests/metric_integration_test.cc",
"//chrome/browser/page_load_metrics/integration_tests/metric_integration_test.h", "//chrome/browser/page_load_metrics/integration_tests/metric_integration_test.h",
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment