Commit ef65d7a9 authored by Tom McKee, committed by Commit Bot

[UserTimingL3] Add 'duration' member of PerformanceMeasureOptions.

In the User Timing L3 spec
(https://w3c.github.io/user-timing/#performancemeasureoptions-dictionary),
PerformanceMeasureOptions dictionaries can have a 'duration' member.
This gives users finer control over the timespan covered by a
`performance.measure()` call.
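For illustration, a rough sketch of the intended usage (mark names, measure
names and values here are made up, not taken from this change):

  performance.mark('taskStart');
  // 'end' is computed as start + duration.
  performance.measure('task-first-100ms', { start: 'taskStart', duration: 100 });
  // 'start' is computed as end - duration.
  performance.measure('task-last-100ms', { duration: 100, end: performance.now() });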

WPT coverage is also extended to check that user agents support the
'duration' member and raise errors as specified.

Bug: 953848
Change-Id: Ibd2a9536b1688e19bebfd559dcf9f3437b7d89e7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1753741
Commit-Queue: Tom McKee <tommckee@chromium.org>
Reviewed-by: Nicolás Peña Moreno <npm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#687391}
parent b2140e0a
......@@ -87,7 +87,8 @@ const Performance::UnifiedClock* DefaultUnifiedClock() {
}
bool IsMeasureOptionsEmpty(const PerformanceMeasureOptions& options) {
return !options.hasDetail() && !options.hasEnd() && !options.hasStart();
return !options.hasDetail() && !options.hasEnd() && !options.hasStart() &&
!options.hasDuration();
}
} // namespace
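To sketch why this predicate matters (behavior matches the WPT updated below;
mark names are illustrative and assumed to have been created already):

  // An empty or unrecognized dictionary still takes the legacy path, so the
  // measure starts at time 0 and ends at the given mark.
  performance.measure('m-empty', {}, 'markA');
  performance.measure('m-junk', { invalidDict: 1 }, 'markA');
  // With 'duration' now recognized, this dictionary is no longer empty; it
  // reaches the options path and throws a TypeError because it has neither
  // 'start' nor 'end'.
  performance.measure('m-duration-only', { duration: 5 });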
......@@ -673,28 +674,27 @@ PerformanceMeasure* Performance::measure(
base::Optional<String>(end), exception_state);
}
// |start_or_options|: while in options type, the value is an object {start,
// end, detail}, and |end| must be null; while in string or double type, the
// value is start. So there are two ways we can input the start and end
// (omitting |script_state| and |measure_name|):
// 1. measure(start, end)
// 2. measure({start, end})
// |MeasureInternal| exists to unify the arguments from different
// `performance.measure()` overloads into a consistent form, then delegate to
// |MeasureWithDetail|.
//
// For simplicity, the method below unifies these ways into a single form -
// measure(name, start, end, detail). The mapping between two measure methods
// (using measure_ to denote the measure after transformation) goes as follows:
// 1. measure(start, end): measure_(start, end, null)
// 2. measure({start, end, detail}, null): measure_(start, end, detail)
// 3. measure({start, end, detail}, end): invalid
// |start_or_options| is either a String or a dictionary of options. When it's
// a String, it represents a starting performance mark. When it's a dictionary,
// the allowed fields are 'start', 'duration', 'end' and 'detail'. However,
// there are some combinations of fields and parameters which must raise
// errors. Specifically, the spec (https://w3c.github.io/user-timing/) requires
// errors to be thrown in the following cases (sketched in JS after this hunk):
// - If |start_or_options| is a dictionary and 'end_mark' is passed.
// - If an options dictionary contains neither a 'start' nor an 'end' field.
// - If an options dictionary contains all of 'start', 'duration' and 'end'.
//
// When |end| is null in C++, we cannot tell whether |end| is null, undefined or
// empty in JS from StringOrDouble, so we need |end_is_empty| to help
// distinguish between (null or undefined) and empty.
// |end_mark| will be base::nullopt unless the `performance.measure()` overload
// specified an end mark.
PerformanceMeasure* Performance::MeasureInternal(
ScriptState* script_state,
const AtomicString& measure_name,
const StringOrPerformanceMeasureOptions& start_or_options,
base::Optional<String> end,
base::Optional<String> end_mark,
ExceptionState& exception_state) {
DCHECK(!start_or_options.IsNull());
if (RuntimeEnabledFeatures::CustomUserTimingEnabled()) {
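A sketch of how the rules above play out at the JS API (mark names are
illustrative and assumed to have been created with performance.mark()):

  // Valid: string overload with separate start and end marks.
  performance.measure('m1', 'markA', 'markB');
  // Valid: options dictionary; the missing boundary is derived from 'duration'.
  performance.measure('m2', { start: 'markA', duration: 50 });
  // TypeError: a non-empty options dictionary plus an end-mark argument.
  performance.measure('m3', { start: 'markA' }, 'markB');
  // TypeError: the dictionary has neither 'start' nor 'end'.
  performance.measure('m4', { detail: 'info' });
  // TypeError: 'start', 'duration' and 'end' are all present.
  performance.measure('m5', { start: 0, duration: 10, end: 10 });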
......@@ -703,10 +703,10 @@ PerformanceMeasure* Performance::MeasureInternal(
!IsMeasureOptionsEmpty(
*start_or_options.GetAsPerformanceMeasureOptions())) {
// measure("name", { start, end }, *)
if (end) {
if (end_mark) {
exception_state.ThrowTypeError(
"If a PerformanceMeasureOptions object was passed, |end| must be "
"null.");
"If a non-empty PerformanceMeasureOptions object was passed, "
"|end_mark| must not be passed.");
return nullptr;
}
const PerformanceMeasureOptions* options =
......@@ -717,9 +717,23 @@ PerformanceMeasure* Performance::MeasureInternal(
"least one of its 'start' or 'end' properties must be present.");
return nullptr;
}
if (options->hasStart() && options->hasDuration() && options->hasEnd()) {
exception_state.ThrowTypeError(
"If a non-empty PerformanceMeasureOptions object was passed, it "
"must not have all of its 'start', 'duration', and 'end' "
"properties defined");
return nullptr;
}
base::Optional<double> duration = base::nullopt;
if (options->hasDuration()) {
duration = options->duration();
}
return MeasureWithDetail(script_state, measure_name, options->start(),
options->end(), options->detail(),
exception_state);
std::move(duration), options->end(),
options->detail(), exception_state);
}
// measure("name", "mark1", *)
StringOrDouble converted_start;
......@@ -727,12 +741,13 @@ PerformanceMeasure* Performance::MeasureInternal(
converted_start =
StringOrDouble::FromString(start_or_options.GetAsString());
}
// We let |end| behave the same whether it's empty, undefined or null in
// JS, as long as |end| is null in C++.
// We let |end_mark| behave the same whether it's empty, undefined or null
// in JS, as long as |end_mark| is null in C++.
return MeasureWithDetail(
script_state, measure_name, converted_start,
end ? StringOrDouble::FromString(*end)
: NativeValueTraits<StringOrDouble>::NullValue(),
/* duration = */ base::nullopt,
end_mark ? StringOrDouble::FromString(*end_mark)
: NativeValueTraits<StringOrDouble>::NullValue(),
ScriptValue::CreateNull(script_state), exception_state);
}
// For consistency with UserTimingL2: the L2 API took |start| as a string,
......@@ -747,8 +762,9 @@ PerformanceMeasure* Performance::MeasureInternal(
}
MeasureWithDetail(script_state, measure_name, converted_start,
end ? StringOrDouble::FromString(*end)
: NativeValueTraits<StringOrDouble>::NullValue(),
/* duration = */ base::nullopt,
end_mark ? StringOrDouble::FromString(*end_mark)
: NativeValueTraits<StringOrDouble>::NullValue(),
ScriptValue::CreateNull(script_state), exception_state);
// Return nullptr to distinguish from L3.
return nullptr;
......@@ -758,15 +774,16 @@ PerformanceMeasure* Performance::MeasureWithDetail(
ScriptState* script_state,
const AtomicString& measure_name,
const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end,
const ScriptValue& detail,
ExceptionState& exception_state) {
StringOrDouble original_start = start;
StringOrDouble original_end = end;
PerformanceMeasure* performance_measure =
GetUserTiming().Measure(script_state, measure_name, original_start,
original_end, detail, exception_state);
PerformanceMeasure* performance_measure = GetUserTiming().Measure(
script_state, measure_name, original_start, std::move(duration),
original_end, detail, exception_state);
if (performance_measure)
NotifyObserversOfEntry(*performance_measure);
return performance_measure;
......
......@@ -304,12 +304,13 @@ class CORE_EXPORT Performance : public EventTargetWithInlineData {
ScriptState*,
const AtomicString& measure_name,
const StringOrPerformanceMeasureOptions& start,
base::Optional<String> end,
base::Optional<String> end_mark,
ExceptionState&);
PerformanceMeasure* MeasureWithDetail(ScriptState*,
const AtomicString& measure_name,
const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end,
const ScriptValue& detail,
ExceptionState&);
......
......@@ -6,6 +6,6 @@
dictionary PerformanceMeasureOptions {
any detail;
(DOMString or DOMHighResTimeStamp) start;
// TODO(crbug.com/758385): add |duration| for better ergonomics.
DOMHighResTimeStamp duration;
(DOMString or DOMHighResTimeStamp) end;
};
......@@ -208,6 +208,7 @@ double UserTiming::GetTimeOrFindMarkTime(const AtomicString& measure_name,
PerformanceMeasure* UserTiming::Measure(ScriptState* script_state,
const AtomicString& measure_name,
const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end,
const ScriptValue& detail,
ExceptionState& exception_state) {
......@@ -224,6 +225,19 @@ PerformanceMeasure* UserTiming::Measure(ScriptState* script_state,
if (exception_state.HadException())
return nullptr;
if (duration.has_value()) {
// When |duration| is specified, we require that exactly one of |start| and
// |end| be specified. Then, since |start| + |duration| = |end|, we'll
// compute the missing boundary.
if (start.IsNull()) {
start_time = end_time - duration.value();
} else {
DCHECK(end.IsNull()) << "When duration is specified, one of 'start' or "
"'end' must be unspecified";
end_time = start_time + duration.value();
}
}
// User timing events are stored as integer milliseconds from the start of
// navigation, whereas trace events accept double seconds based off of
// CurrentTime::monotonicallyIncreasingTime().
......
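Worked numbers for the boundary computation above (values are illustrative
DOMHighResTimeStamp milliseconds):

  // start = 1000, duration = 250  =>  end = 1250
  performance.measure('m-a', { start: 1000, duration: 250 });
  // duration = 500, end = 2000    =>  start = 1500
  performance.measure('m-b', { duration: 500, end: 2000 });
  // The resulting entries report startTime/duration of 1000/250 and 1500/500.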
......@@ -52,6 +52,7 @@ class UserTiming final : public GarbageCollected<UserTiming> {
PerformanceMeasure* Measure(ScriptState*,
const AtomicString& measure_name,
const StringOrDouble& start,
base::Optional<double> duration,
const StringOrDouble& end,
const ScriptValue& detail,
ExceptionState&);
......
......@@ -30,10 +30,12 @@ async_test(function (t) {
{ entryType: "measure", name: "measure15", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure16", detail: null, startTime: timeStamp1 },
{ entryType: "measure", name: "measure17", detail: { customInfo: 159 }, startTime: timeStamp3, duration: timeStamp2 - timeStamp3 },
{ entryType: "measure", name: "measure18", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure19", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure18", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure19", detail: null, startTime: timeStamp1, duration: timeStamp2 - timeStamp1 },
{ entryType: "measure", name: "measure20", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 }];
{ entryType: "measure", name: "measure21", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure22", detail: null, startTime: 0 },
{ entryType: "measure", name: "measure23", detail: null, startTime: 0 }];
const observer = new PerformanceObserver(
t.step_func(function (entryList, obs) {
measureEntries =
......@@ -80,11 +82,15 @@ async_test(function (t) {
self.performance.measure("measure16", { start: 'mark1', end: undefined, detail: null }));
returnedEntries.push(
self.performance.measure("measure17", { start: timeStamp3, end: 'mark2', detail: { customInfo: 159 }}));
returnedEntries.push(
self.performance.measure("measure18", { start: timeStamp1, duration: timeStamp2 - timeStamp1 }));
returnedEntries.push(
self.performance.measure("measure19", { duration: timeStamp2 - timeStamp1, end: timeStamp2 }));
// {}, null, undefined, invalid-dict passed to startOrOptions are interpreted as start time being 0.
returnedEntries.push(self.performance.measure("measure18", {}, 'mark1'));
returnedEntries.push(self.performance.measure("measure19", null, 'mark1'));
returnedEntries.push(self.performance.measure("measure20", undefined, 'mark1'));
returnedEntries.push(self.performance.measure("measure21", { invalidDict:1 }, 'mark1'));
returnedEntries.push(self.performance.measure("measure20", {}, 'mark1'));
returnedEntries.push(self.performance.measure("measure21", null, 'mark1'));
returnedEntries.push(self.performance.measure("measure22", undefined, 'mark1'));
returnedEntries.push(self.performance.measure("measure23", { invalidDict:1 }, 'mark1'));
checkEntries(returnedEntries, expectedEntries);
}, "measure entries' detail and start/end are customizable");
......
......@@ -28,6 +28,7 @@ test_method_throw_exception('performance.measure("Exception5", "ExistMark", "Non
test_method_throw_exception('performance.measure("Exception6", "NonExistMark1", "NonExistMark2")', 'SYNTAX_ERR');
test_method_throw_exception('performance.measure("Exception7", "redirectStart")', 'INVALID_ACCESS_ERR');
test_method_throw_exception('performance.measure("Exception8", {"detail": "non-empty"})', TypeError());
test_method_throw_exception('performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3})', TypeError());
</script>
</body>
</html>
......@@ -8,5 +8,6 @@ PASS Invocation of performance.measure("Exception5", "ExistMark", "NonExistMark1
PASS Invocation of performance.measure("Exception6", "NonExistMark1", "NonExistMark2") should throw SYNTAX_ERR Exception.
PASS Invocation of performance.measure("Exception7", "redirectStart") should throw INVALID_ACCESS_ERR Exception.
FAIL Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. assert_throws: Invocation of performance.measure("Exception8", {"detail": "non-empty"}) should throw TypeError Exception. function "function() {eval(func_str)}" did not throw
FAIL Invocation of performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3}) should throw TypeError Exception. assert_throws: Invocation of performance.measure("Exception9", {"start": 1, "duration": 2, "end": 3}) should throw TypeError Exception. function "function() {eval(func_str)}" did not throw
Harness: the test ran to completion.