Commit 34d38f69 authored by Liquan (Max) Gu, committed by Commit Bot

[UserTimingL3] {}, null, undefined passed to start treated as 0

According to the spec (https://w3c.github.io/user-timing/#measure-method),
the second argument of the UserTiming L3 measure() method is:

optional (DOMString or PerformanceMeasureOptions) startOrMeasureOptions

This means the argument cannot distinguish between:
1. null
2. {}
3. undefined
4. { invalidDictProperty }

According to the spec:
3.1.3 measure() method...
 3. Compute start time as follows...
  4. Otherwise, let start time be 0.

When these indistinguishable values are passed, they should be
interpreted as start time being 0.
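For illustration, a sketch of the intended behavior (the mark name
'mark1' is hypothetical; any existing mark works as the end point):

  performance.mark('mark1');
  // Each call below is interpreted as measuring from start time 0 to 'mark1':
  performance.measure('m1', {}, 'mark1');
  performance.measure('m2', null, 'mark1');
  performance.measure('m3', undefined, 'mark1');
  performance.measure('m4', { invalidDictProperty: 1 }, 'mark1');
  performance.getEntriesByName('m1')[0].startTime;  // 0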

Bug: 953840

Change-Id: I052a7823d9ae8b27056f53f04a26fcc93421db75
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1691862
Commit-Queue: Liquan (Max) Gu <maxlg@chromium.org>
Reviewed-by: Nicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#675873}
parent a134d1c3
@@ -86,6 +86,10 @@ const Performance::UnifiedClock* DefaultUnifiedClock() {
   return &unified_clock;
 }
 
+bool IsMeasureOptionsEmpty(const PerformanceMeasureOptions& options) {
+  return !options.hasDetail() && !options.hasEnd() && !options.hasStart();
+}
+
 }  // namespace
 
 using PerformanceObserverVector = HeapVector<Member<PerformanceObserver>>;
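As a reading of the IsMeasureOptionsEmpty() helper above: an options
object counts as empty when none of start, end, or detail survives
dictionary conversion, and WebIDL drops unrecognized members. A sketch
from the JavaScript side (mark and property names are illustrative):

  performance.measure('a', {});                  // empty: treated like no options
  performance.measure('b', { invalidDict: 1 });  // empty: unknown member is dropped
  performance.measure('c', { start: 'mark1' });  // non-empty: handled as options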
@@ -643,9 +647,12 @@ void Performance::clearMarks(const AtomicString& mark_name) {
 PerformanceMeasure* Performance::measure(ScriptState* script_state,
                                          const AtomicString& measure_name,
                                          ExceptionState& exception_state) {
+  // When |startOrOptions| is not provided, it's assumed to be an empty
+  // dictionary.
   return MeasureInternal(
       script_state, measure_name,
-      NativeValueTraits<StringOrPerformanceMeasureOptions>::NullValue(),
+      StringOrPerformanceMeasureOptions::FromPerformanceMeasureOptions(
+          PerformanceMeasureOptions::Create()),
       base::nullopt, exception_state);
 }
@@ -691,9 +698,13 @@ PerformanceMeasure* Performance::MeasureInternal(
     const StringOrPerformanceMeasureOptions& start_or_options,
     base::Optional<String> end,
     ExceptionState& exception_state) {
+  DCHECK(!start_or_options.IsNull());
   if (RuntimeEnabledFeatures::CustomUserTimingEnabled()) {
-    if (start_or_options.IsPerformanceMeasureOptions()) {
-      // measure("name", {}, *)
+    // An empty options object is treated the same as null or undefined.
+    if (start_or_options.IsPerformanceMeasureOptions() &&
+        !IsMeasureOptionsEmpty(
+            *start_or_options.GetAsPerformanceMeasureOptions())) {
+      // measure("name", { start, end }, *)
       if (end) {
         exception_state.ThrowTypeError(
             "If a PerformanceMeasureOptions object was passed, |end| must be "
@@ -712,7 +723,8 @@ PerformanceMeasure* Performance::MeasureInternal(
       converted_start =
           StringOrDouble::FromString(start_or_options.GetAsString());
     } else {
-      DCHECK(start_or_options.IsNull());
+      DCHECK(start_or_options.IsNull() ||
+             start_or_options.IsPerformanceMeasureOptions());
       converted_start = NativeValueTraits<StringOrDouble>::NullValue();
     }
     // We let |end| behave the same whether it's empty, undefined or null in
@@ -729,13 +741,12 @@ PerformanceMeasure* Performance::MeasureInternal(
   // string 'null'.
   StringOrDouble converted_start;
   if (start_or_options.IsPerformanceMeasureOptions()) {
-    converted_start = StringOrDouble::FromString("[object Object]");
-  } else if (start_or_options.IsString()) {
+    converted_start = NativeValueTraits<StringOrDouble>::NullValue();
+  } else {
+    // |start_or_options| is not nullable.
+    DCHECK(start_or_options.IsString());
     converted_start =
         StringOrDouble::FromString(start_or_options.GetAsString());
-  } else {
-    DCHECK(start_or_options.IsNull());
-    DCHECK(converted_start.IsNull());
   }
   MeasureWithDetail(script_state, measure_name, converted_start,
...
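A behavioral consequence of the MeasureInternal() changes above,
sketched in JavaScript (consistent with the updated tests further down;
'mark1' is assumed to exist): a non-empty options object combined with
a trailing end argument throws, while an empty object no longer does:

  performance.measure('x', { start: 2 }, 12);  // TypeError: |end| must not be passed
  performance.measure('y', {}, 'mark1');       // OK: start time is 0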
@@ -73,7 +73,7 @@ interface Performance : EventTarget {
   // * L2 API returns null but this is a bug: crbug.com/914441.
   // * L3 API returns the created entry.
   // https://w3c.github.io/user-timing/#extensions-performance-interface
-  [MeasureAs=UserTiming, CallWith=ScriptState, RaisesException] PerformanceMeasure? measure(DOMString measureName, optional (DOMString or PerformanceMeasureOptions)? startOrOptions, optional DOMString end);
+  [MeasureAs=UserTiming, CallWith=ScriptState, RaisesException] PerformanceMeasure? measure(DOMString measureName, optional (DOMString or PerformanceMeasureOptions) startOrOptions, optional DOMString end);
   [MeasureAs=UserTiming] void clearMeasures(optional DOMString measureName = null);
   // TODO(foolip): There is no spec for the Memory Info API, see blink-dev:
...
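A note on the IDL change above: removing the trailing '?' makes the
union non-nullable, so an explicit null is handled by the union's
dictionary conversion instead of arriving as a distinct null value.
Sketch of the resulting equivalence ('mark2' is a hypothetical end mark):

  performance.measure('m1');                      // start 0, end: now
  performance.measure('m2', undefined, 'mark2');  // start 0, end: 'mark2'
  performance.measure('m3', null, 'mark2');       // start 0, end: 'mark2'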
@@ -34,7 +34,11 @@
       { entryType: "measure", name: "measure14", detail: null, startTime: timeStamp3, duration: timeStamp1 - timeStamp3 },
       { entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
       { entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
-      { entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 }];
+      { entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
+      { entryType: "measure", name: "measure18", detail: null, startTime: 0 },
+      { entryType: "measure", name: "measure19", detail: null, startTime: 0 },
+      { entryType: "measure", name: "measure20", detail: null, startTime: 0 },
+      { entryType: "measure", name: "measure21", detail: null, startTime: 0 }];
   const observer = new PerformanceObserver(
       t.step_func(function (entryList, obs) {
         measureEntries =
@@ -81,19 +85,21 @@
         self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
     returnedEntries.push(
         self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
+    // {}, null, undefined, or an invalid dictionary passed to startOrOptions
+    // is interpreted as a start time of 0.
+    returnedEntries.push(self.performance.measure("measure18", {}, 'mark1'));
+    returnedEntries.push(self.performance.measure("measure19", null, 'mark1'));
+    returnedEntries.push(self.performance.measure("measure20", undefined, 'mark1'));
+    returnedEntries.push(self.performance.measure("measure21", { invalidDict: 1 }, 'mark1'));
     checkEntries(returnedEntries, expectedEntries);
   }, "measure entries' detail and start/end are customizable");
-  test(function () {
+  test(function() {
     this.add_cleanup(cleanupPerformanceTimeline);
     assert_throws(new TypeError(), function() {
-      self.performance.measure("wrongUsage1", {}, 12);
+      self.performance.measure("wrongUsage2", {'start': 2}, 12);
     }, "measure should throw a TypeError when passed an options object and an end time");
     assert_throws(new TypeError(), function() {
-      self.performance.measure("wrongUsage2", {'startTime': 2}, 12);
-    }, "measure should throw a TypeError when passed an options object and an end time");
-    assert_throws(new TypeError(), function() {
-      self.performance.measure("wrongUsage3", {'startTime': 2}, 'mark1');
+      self.performance.measure("wrongUsage3", {'start': 2}, 'mark1');
     }, "measure should throw a TypeError when passed an options object and an end mark");
   }, "measure should throw a TypeError when passed an invalid argument combination");
 </script>
 This is a testharness.js-based test.
 FAIL L3: performance.measure(name) should return an entry. assert_true: expected true got false
-FAIL L3: performance.measure(name, param1) should return an entry. Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist.
+FAIL L3: performance.measure(name, param1) should return an entry. assert_true: expected true got false
 FAIL L3: performance.measure(name, param1, param2) should return an entry. assert_true: expected true got false
 FAIL L3: performance.mark(name) should return an entry. assert_true: expected true got false
 FAIL L3: performance.mark(name, param) should return an entry. assert_true: expected true got false
...
 This is a testharness.js-based test.
-FAIL measure entries' detail and start/end are customizable Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist.
+FAIL measure entries' detail and start/end are customizable Cannot read property 'name' of null
 FAIL measure should throw a TypeError when passed an invalid argument combination assert_throws: measure should throw a TypeError when passed an options object and an end time function "function() {
-  self.performance.measure("wrongUsage1", {}, 12);
-}" threw object "SyntaxError: Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist." ("SyntaxError") expected object "TypeError" ("TypeError")
+  self.performance.measure("wrongUsage2", {'start': 2}, 12);
+}" threw object "SyntaxError: Failed to execute 'measure' on 'Performance': The mark '12' does not exist." ("SyntaxError") expected object "TypeError" ("TypeError")
 Harness: the test ran to completion.
@@ -5,12 +5,12 @@ FAIL When accessing detail from a mark entry and the detail is not provided, jus
 FAIL Mark: Throw an exception when the detail property cannot be structured-serialized. assert_throws: Trying to structured-serialize a Symbol. function "()=>{
   new PerformanceMark("A", { detail });
 }" did not throw
-FAIL The detail property in the measure method should be structured-clone. Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist.
-FAIL The detail property in the measure method should be the same reference. Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist.
+FAIL The detail property in the measure method should be structured-clone. Cannot read property 'detail' of null
+FAIL The detail property in the measure method should be the same reference. Cannot read property 'detail' of null
 FAIL When accessing detail from a measure entry and the detail is not provided, just return a null value. Cannot read property 'detail' of null
 FAIL Measure: Throw an exception when the detail property cannot be structured-serialized. assert_throws: Trying to structured-serialize a Symbol. function "()=>{
   performance.measure("A", { detail });
-}" threw object "SyntaxError: Failed to execute 'measure' on 'Performance': The mark '[object Object]' does not exist." that is not a DOMException DataCloneError: property "code" is equal to 12, expected 25
+}" did not throw
 FAIL The detail object is cloned when passed to mark API. Cannot read property 'detail' of null
 Harness: the test ran to completion.