Commit ef65d7a9 authored by Tom McKee's avatar Tom McKee Committed by Commit Bot

[UserTimingL3] Add 'duration' member of PerformanceMeasureOptions.

In the User Timing L3 spec
(https://w3c.github.io/user-timing/#performancemeasureoptions-dictionary),
PerformanceMeasureOptions dictionaries can have a 'duration' member.
This gives users better control over what timespan gets covered when
calling `performance.measure()`.

WPT coverage is also extended to check that user agents support the
'duration' member and raise errors as specified.

Bug: 953848
Change-Id: Ibd2a9536b1688e19bebfd559dcf9f3437b7d89e7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1753741
Commit-Queue: Tom McKee <tommckee@chromium.org>
Reviewed-by: default avatarNicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#687391}
parent b2140e0a
...@@ -87,7 +87,8 @@ const Performance::UnifiedClock* DefaultUnifiedClock() { ...@@ -87,7 +87,8 @@ const Performance::UnifiedClock* DefaultUnifiedClock() {
} }
bool IsMeasureOptionsEmpty(const PerformanceMeasureOptions& options) { bool IsMeasureOptionsEmpty(const PerformanceMeasureOptions& options) {
return !options.hasDetail() && !options.hasEnd() && !options.hasStart(); return !options.hasDetail() && !options.hasEnd() && !options.hasStart() &&
!options.hasDuration();
} }
} // namespace } // namespace
...@@ -673,28 +674,27 @@ PerformanceMeasure* Performance::measure( ...@@ -673,28 +674,27 @@ PerformanceMeasure* Performance::measure(
base::Optional<String>(end), exception_state); base::Optional<String>(end), exception_state);
} }
// |start_or_options|: while in options type, the value is an object {start, // |MeasureInternal| exists to unify the arguments from different
// end, detail}, and |end| must be null; while in string or double type, the // `performance.measure()` overloads into a consistent form, then delegate to
// value is start. So there are two ways we can input the start and end // |MeasureWithDetail|.
// (omitting |script_state| and |measure_name|):
// 1. measure(start, end)
// 2. measure({start, end})
// //
// For simplicity, the method below unifies these ways into a single form - // |start_or_options| is either a String or a dictionary of options. When it's
// measure(name, start, end, detail). The mapping between two measure methods // a String, it represents a starting performance mark. When it's a dictionary,
// (using measure_ to denote the measure after transformation) goes as follows: // the allowed fields are 'start', 'duration', 'end' and 'detail'. However,
// 1. measure(start, end): measure_(start, end, null) // there are some combinations of fields and parameters which must raise
// 2. measure({start, end, detail}, null): measure_(start, end, detail) // errors. Specifically, the spec (https://w3c.github.io/user-timing/)
// 3. measure({start, end, detail}, end): invalid // requires errors to be thrown in the following cases:
// - If |start_or_options| is a dictionary and 'end_mark' is passed.
// - If an options dictionary contains neither a 'start' nor an 'end' field.
// - If an options dictionary contains all of 'start', 'duration' and 'end'.
// //
// When |end| is null in C++, we cannot tell whether |end| is null, undefined or // |end_mark| will be base::nullopt unless the `performance.measure()` overload
// empty in JS from StringOrDouble, so we need |end_is_empty| to help // specified an end mark.
// distinguish between (null or undefined) and empty.
PerformanceMeasure* Performance::MeasureInternal( PerformanceMeasure* Performance::MeasureInternal(
ScriptState* script_state, ScriptState* script_state,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrPerformanceMeasureOptions& start_or_options, const StringOrPerformanceMeasureOptions& start_or_options,
base::Optional<String> end, base::Optional<String> end_mark,
ExceptionState& exception_state) { ExceptionState& exception_state) {
DCHECK(!start_or_options.IsNull()); DCHECK(!start_or_options.IsNull());
if (RuntimeEnabledFeatures::CustomUserTimingEnabled()) { if (RuntimeEnabledFeatures::CustomUserTimingEnabled()) {
...@@ -703,10 +703,10 @@ PerformanceMeasure* Performance::MeasureInternal( ...@@ -703,10 +703,10 @@ PerformanceMeasure* Performance::MeasureInternal(
!IsMeasureOptionsEmpty( !IsMeasureOptionsEmpty(
*start_or_options.GetAsPerformanceMeasureOptions())) { *start_or_options.GetAsPerformanceMeasureOptions())) {
// measure("name", { start, end }, *) // measure("name", { start, end }, *)
if (end) { if (end_mark) {
exception_state.ThrowTypeError( exception_state.ThrowTypeError(
"If a PerformanceMeasureOptions object was passed, |end| must be " "If a non-empty PerformanceMeasureOptions object was passed, "
"null."); "|end_mark| must not be passed.");
return nullptr; return nullptr;
} }
const PerformanceMeasureOptions* options = const PerformanceMeasureOptions* options =
...@@ -717,9 +717,23 @@ PerformanceMeasure* Performance::MeasureInternal( ...@@ -717,9 +717,23 @@ PerformanceMeasure* Performance::MeasureInternal(
"least one of its 'start' or 'end' properties must be present."); "least one of its 'start' or 'end' properties must be present.");
return nullptr; return nullptr;
} }
if (options->hasStart() && options->hasDuration() && options->hasEnd()) {
exception_state.ThrowTypeError(
"If a non-empty PerformanceMeasureOptions object was passed, it "
"must not have all of its 'start', 'duration', and 'end' "
"properties defined");
return nullptr;
}
base::Optional<double> duration = base::nullopt;
if (options->hasDuration()) {
duration = options->duration();
}
return MeasureWithDetail(script_state, measure_name, options->start(), return MeasureWithDetail(script_state, measure_name, options->start(),
options->end(), options->detail(), std::move(duration), options->end(),
exception_state); options->detail(), exception_state);
} }
// measure("name", "mark1", *) // measure("name", "mark1", *)
StringOrDouble converted_start; StringOrDouble converted_start;
...@@ -727,11 +741,12 @@ PerformanceMeasure* Performance::MeasureInternal( ...@@ -727,11 +741,12 @@ PerformanceMeasure* Performance::MeasureInternal(
converted_start = converted_start =
StringOrDouble::FromString(start_or_options.GetAsString()); StringOrDouble::FromString(start_or_options.GetAsString());
} }
// We let |end| behave the same whether it's empty, undefined or null in // We let |end_mark| behave the same whether it's empty, undefined or null
// JS, as long as |end| is null in C++. // in JS, as long as |end_mark| is null in C++.
return MeasureWithDetail( return MeasureWithDetail(
script_state, measure_name, converted_start, script_state, measure_name, converted_start,
end ? StringOrDouble::FromString(*end) /* duration = */ base::nullopt,
end_mark ? StringOrDouble::FromString(*end_mark)
: NativeValueTraits<StringOrDouble>::NullValue(), : NativeValueTraits<StringOrDouble>::NullValue(),
ScriptValue::CreateNull(script_state), exception_state); ScriptValue::CreateNull(script_state), exception_state);
} }
...@@ -747,7 +762,8 @@ PerformanceMeasure* Performance::MeasureInternal( ...@@ -747,7 +762,8 @@ PerformanceMeasure* Performance::MeasureInternal(
} }
MeasureWithDetail(script_state, measure_name, converted_start, MeasureWithDetail(script_state, measure_name, converted_start,
end ? StringOrDouble::FromString(*end) /* duration = */ base::nullopt,
end_mark ? StringOrDouble::FromString(*end_mark)
: NativeValueTraits<StringOrDouble>::NullValue(), : NativeValueTraits<StringOrDouble>::NullValue(),
ScriptValue::CreateNull(script_state), exception_state); ScriptValue::CreateNull(script_state), exception_state);
// Return nullptr to distinguish from L3. // Return nullptr to distinguish from L3.
...@@ -758,14 +774,15 @@ PerformanceMeasure* Performance::MeasureWithDetail( ...@@ -758,14 +774,15 @@ PerformanceMeasure* Performance::MeasureWithDetail(
ScriptState* script_state, ScriptState* script_state,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrDouble& start, const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end, const StringOrDouble& end,
const ScriptValue& detail, const ScriptValue& detail,
ExceptionState& exception_state) { ExceptionState& exception_state) {
StringOrDouble original_start = start; StringOrDouble original_start = start;
StringOrDouble original_end = end; StringOrDouble original_end = end;
PerformanceMeasure* performance_measure = PerformanceMeasure* performance_measure = GetUserTiming().Measure(
GetUserTiming().Measure(script_state, measure_name, original_start, script_state, measure_name, original_start, std::move(duration),
original_end, detail, exception_state); original_end, detail, exception_state);
if (performance_measure) if (performance_measure)
NotifyObserversOfEntry(*performance_measure); NotifyObserversOfEntry(*performance_measure);
......
...@@ -304,12 +304,13 @@ class CORE_EXPORT Performance : public EventTargetWithInlineData { ...@@ -304,12 +304,13 @@ class CORE_EXPORT Performance : public EventTargetWithInlineData {
ScriptState*, ScriptState*,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrPerformanceMeasureOptions& start, const StringOrPerformanceMeasureOptions& start,
base::Optional<String> end, base::Optional<String> end_mark,
ExceptionState&); ExceptionState&);
PerformanceMeasure* MeasureWithDetail(ScriptState*, PerformanceMeasure* MeasureWithDetail(ScriptState*,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrDouble& start, const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end, const StringOrDouble& end,
const ScriptValue& detail, const ScriptValue& detail,
ExceptionState&); ExceptionState&);
......
...@@ -6,6 +6,6 @@ ...@@ -6,6 +6,6 @@
dictionary PerformanceMeasureOptions { dictionary PerformanceMeasureOptions {
any detail; any detail;
(DOMString or DOMHighResTimeStamp) start; (DOMString or DOMHighResTimeStamp) start;
// TODO(crbug.com/758385): add |duration| for better ergonomics. DOMHighResTimeStamp duration;
(DOMString or DOMHighResTimeStamp) end; (DOMString or DOMHighResTimeStamp) end;
}; };
...@@ -208,6 +208,7 @@ double UserTiming::GetTimeOrFindMarkTime(const AtomicString& measure_name, ...@@ -208,6 +208,7 @@ double UserTiming::GetTimeOrFindMarkTime(const AtomicString& measure_name,
PerformanceMeasure* UserTiming::Measure(ScriptState* script_state, PerformanceMeasure* UserTiming::Measure(ScriptState* script_state,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrDouble& start, const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end, const StringOrDouble& end,
const ScriptValue& detail, const ScriptValue& detail,
ExceptionState& exception_state) { ExceptionState& exception_state) {
...@@ -224,6 +225,19 @@ PerformanceMeasure* UserTiming::Measure(ScriptState* script_state, ...@@ -224,6 +225,19 @@ PerformanceMeasure* UserTiming::Measure(ScriptState* script_state,
if (exception_state.HadException()) if (exception_state.HadException())
return nullptr; return nullptr;
if (duration.has_value()) {
// When |duration| is specified, we require that exactly one of |start| and
// |end| were specified. Then, since |start| + |duration| = |end|, we'll
// compute the missing boundary.
if (start.IsNull()) {
start_time = end_time - duration.value();
} else {
DCHECK(end.IsNull()) << "When duration is specified, one of 'start' or "
"'end' must be unspecified";
end_time = start_time + duration.value();
}
}
// User timing events are stored as integer milliseconds from the start of // User timing events are stored as integer milliseconds from the start of
// navigation, whereas trace events accept double seconds based off of // navigation, whereas trace events accept double seconds based off of
// CurrentTime::monotonicallyIncreasingTime(). // CurrentTime::monotonicallyIncreasingTime().
......
...@@ -52,6 +52,7 @@ class UserTiming final : public GarbageCollected<UserTiming> { ...@@ -52,6 +52,7 @@ class UserTiming final : public GarbageCollected<UserTiming> {
PerformanceMeasure* Measure(ScriptState*, PerformanceMeasure* Measure(ScriptState*,
const AtomicString& measure_name, const AtomicString& measure_name,
const StringOrDouble& start, const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end, const StringOrDouble& end,
const ScriptValue& detail, const ScriptValue& detail,
ExceptionState&); ExceptionState&);
......
...@@ -30,10 +30,12 @@ async_test(function (t) { ...@@ -30,10 +30,12 @@ async_test(function (t) {
{ entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 }, { entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 }, { entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
{ entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 }, { entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
{ entryType: "measure", name: "measure18", detail: null, startTime: 0 }, { entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure19", detail: null, startTime: 0 }, { entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure20", detail: null, startTime: 0 }, { entryType: "measure", name: "measure20", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 }]; { entryType: "measure", name: "measure21", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure22", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure23", detail: null, startTime: 0 }];
const observer = new PerformanceObserver( const observer = new PerformanceObserver(
t.step_func(function (entryList, obs) { t.step_func(function (entryList, obs) {
measureEntries = measureEntries =
...@@ -80,11 +82,15 @@ async_test(function (t) { ...@@ -80,11 +82,15 @@ async_test(function (t) {
self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null })); self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
returnedEntries.push( returnedEntries.push(
self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }})); self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
returnedEntries.push(
self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 }));
returnedEntries.push(
self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 }));
// {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0. // {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0.
returnedEntries.push(self.performance.measure("measure18", {}, 'mark1')); returnedEntries.push(self.performance.measure("measure20", {}, 'mark1'));
returnedEntries.push(self.performance.measure("measure19", null, 'mark1')); returnedEntries.push(self.performance.measure("measure21", null, 'mark1'));
returnedEntries.push(self.performance.measure("measure20", undefined, 'mark1')); returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1'));
returnedEntries.push(self.performance.measure("measure21", { invalidDict:1 }, 'mark1')); returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1'));
checkEntries(returnedEntries, expectedEntries); checkEntries(returnedEntries, expectedEntries);
}, "measure entries' detail and start/end are customizable"); }, "measure entries' detail and start/end are customizable");
......
...@@ -28,6 +28,7 @@ test_method_throw_exception('performance.measure("Exception5", "ExistMark", "Non ...@@ -28,6 +28,7 @@ test_method_throw_exception('performance.measure("Exception5", "ExistMark", "Non
test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR'); test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR'); test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR');
test_method_throw_exception('performance.measure("Exception8", {"detail": "non-empty"})', TypeError()); test_method_throw_exception('performance.measure("Exception8", {"detail": "non-empty"})', TypeError());
test_method_throw_exception('performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3})', TypeError());
</script> </script>
</body> </body>
</html> </html>
...@@ -8,5 +8,6 @@ PASS Invocation of performance.measure("Exception5", "ExistMark", "NonExistMark1 ...@@ -8,5 +8,6 @@ PASS Invocation of performance.measure("Exception5", "ExistMark", "NonExistMark1
PASS Invocation of performance.measure("Exception6", "NonExistMark1", "NonExistMark2") should throw SYNTAX_ERR Exception. PASS Invocation of performance.measure("Exception6", "NonExistMark1", "NonExistMark2") should throw SYNTAX_ERR Exception.
PASS Invocation of performance.measure("Exception7", "redirectStart") should throw INVALID_ACCESS_ERR Exception. PASS Invocation of performance.measure("Exception7", "redirectStart") should throw INVALID_ACCESS_ERR Exception.
FAIL Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. assert_throws: Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. function "function() {eval(func_str)}" did not throw FAIL Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. assert_throws: Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. function "function() {eval(func_str)}" did not throw
FAIL Invocation of performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3}) should throw TypeError Exception. assert_throws: Invocation of performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3}) should throw TypeError Exception. function "function() {eval(func_str)}" did not throw
Harness: the test ran to completion. Harness: the test ran to completion.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment