Commit 311d7f42 authored by Thomas Guilbert; committed by Commit Bot

[video-raf] Update dictionary members

'presentationTimestamp' is being renamed to 'mediaTime', which
differentiates it from 'presentationTime'.

'expectedPresentationTime' is renamed to 'expectedDisplayTime' to make
the expected usage of the field more intuitive for web developers.

'elapsedProcessingTime' is renamed to 'processingDuration', since it is
a time delta, and not a timestamp.

'presentedFrames' and 'mediaTime' are also marked as required.

Bug: 1012063
Change-Id: Iaee640e9a41eb94155096ac49057cc5e14476ef5
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2099240
Commit-Queue: Thomas Guilbert <tguilbert@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Mounir Lamouri <mlamouri@chromium.org>
Cr-Commit-Position: refs/heads/master@{#755161}
parent e4a13a8b
......@@ -155,14 +155,14 @@ scoped_refptr<VideoFrame> VideoFrameCompositor::GetCurrentFrameOnAnyThread() {
void VideoFrameCompositor::SetCurrentFrame_Locked(
scoped_refptr<VideoFrame> frame,
base::TimeTicks expected_presentation_time) {
base::TimeTicks expected_display_time) {
DCHECK(task_runner_->BelongsToCurrentThread());
TRACE_EVENT1("media", "VideoFrameCompositor::SetCurrentFrame", "frame",
frame->AsHumanReadableString());
current_frame_lock_.AssertAcquired();
current_frame_ = std::move(frame);
last_presentation_time_ = tick_clock_->NowTicks();
last_expected_presentation_time_ = expected_presentation_time;
last_expected_display_time_ = expected_display_time;
++presentation_counter_;
}
......@@ -290,15 +290,14 @@ VideoFrameCompositor::GetLastPresentedFrameMetadata() {
base::AutoLock lock(current_frame_lock_);
last_frame = current_frame_;
frame_metadata->presentation_time = last_presentation_time_;
frame_metadata->expected_presentation_time =
last_expected_presentation_time_;
frame_metadata->expected_display_time = last_expected_display_time_;
frame_metadata->presented_frames = presentation_counter_;
}
frame_metadata->width = last_frame->visible_rect().width();
frame_metadata->height = last_frame->visible_rect().height();
frame_metadata->presentation_timestamp = last_frame->timestamp();
frame_metadata->media_time = last_frame->timestamp();
frame_metadata->metadata.MergeMetadataFrom(last_frame->metadata());
......
......@@ -187,7 +187,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
bool repaint_duplicate_frame);
void SetCurrentFrame_Locked(scoped_refptr<VideoFrame> frame,
base::TimeTicks expected_presentation_time);
base::TimeTicks expected_display_time);
// Called by |background_rendering_timer_| when enough time elapses where we
// haven't seen a Render() call.
......@@ -243,8 +243,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Used to fulfill video.requestAnimationFrame() calls.
// See https://wicg.github.io/video-raf/.
base::TimeTicks last_presentation_time_ GUARDED_BY(current_frame_lock_);
base::TimeTicks last_expected_presentation_time_
GUARDED_BY(current_frame_lock_);
base::TimeTicks last_expected_display_time_ GUARDED_BY(current_frame_lock_);
uint32_t presentation_counter_ GUARDED_BY(current_frame_lock_) = 0u;
// These values are updated and read from the media and compositor threads.
......
......@@ -214,7 +214,7 @@ TEST_P(VideoFrameCompositorTest, RenderFiresPrensentationCallback) {
auto metadata = compositor()->GetLastPresentedFrameMetadata();
EXPECT_NE(base::TimeTicks(), metadata->presentation_time);
EXPECT_NE(base::TimeTicks(), metadata->expected_presentation_time);
EXPECT_NE(base::TimeTicks(), metadata->expected_display_time);
}
TEST_P(VideoFrameCompositorTest, MultiplePresentationCallbacks) {
......
......@@ -132,10 +132,10 @@ class WebMediaPlayer {
struct VideoFramePresentationMetadata {
uint32_t presented_frames;
base::TimeTicks presentation_time;
base::TimeTicks expected_presentation_time;
base::TimeTicks expected_display_time;
int width;
int height;
base::TimeDelta presentation_timestamp;
base::TimeDelta media_time;
media::VideoFrameMetadata metadata;
base::TimeDelta rendering_interval;
base::TimeDelta average_frame_duration;
......
......@@ -543,7 +543,7 @@ void WebMediaPlayerMSCompositor::RenderWithoutAlgorithmOnCompositor(
void WebMediaPlayerMSCompositor::SetCurrentFrame(
scoped_refptr<media::VideoFrame> frame,
base::Optional<base::TimeTicks> expected_presentation_time) {
base::Optional<base::TimeTicks> expected_display_time) {
DCHECK(video_frame_compositor_task_runner_->BelongsToCurrentThread());
current_frame_lock_.AssertAcquired();
TRACE_EVENT_INSTANT1("media", "WebMediaPlayerMSCompositor::SetCurrentFrame",
......@@ -594,7 +594,7 @@ void WebMediaPlayerMSCompositor::SetCurrentFrame(
// we only use RenderWithoutAlgorithm.
base::TimeTicks now = base::TimeTicks::Now();
last_presentation_time_ = now;
last_expected_presentation_time_ = expected_presentation_time.value_or(now);
last_expected_display_time_ = expected_display_time.value_or(now);
++presented_frames_;
OnNewFramePresentedCB presented_frame_cb;
......@@ -744,8 +744,7 @@ WebMediaPlayerMSCompositor::GetLastPresentedFrameMetadata() {
base::AutoLock lock(current_frame_lock_);
last_frame = current_frame_;
frame_metadata->presentation_time = last_presentation_time_;
frame_metadata->expected_presentation_time =
last_expected_presentation_time_;
frame_metadata->expected_display_time = last_expected_display_time_;
frame_metadata->presented_frames = static_cast<uint32_t>(presented_frames_);
frame_metadata->average_frame_duration = GetPreferredRenderInterval();
......@@ -755,7 +754,7 @@ WebMediaPlayerMSCompositor::GetLastPresentedFrameMetadata() {
frame_metadata->width = last_frame->visible_rect().width();
frame_metadata->height = last_frame->visible_rect().height();
frame_metadata->presentation_timestamp = last_frame->timestamp();
frame_metadata->media_time = last_frame->timestamp();
frame_metadata->metadata.MergeMetadataFrom(last_frame->metadata());
......
......@@ -236,8 +236,7 @@ class MODULES_EXPORT WebMediaPlayerMSCompositor
// TODO(https://crbug.com/1050755): Improve the accuracy of these fields for
// cases where we only use RenderWithoutAlgorithm().
base::TimeTicks last_presentation_time_ GUARDED_BY(current_frame_lock_);
base::TimeTicks last_expected_presentation_time_
GUARDED_BY(current_frame_lock_);
base::TimeTicks last_expected_display_time_ GUARDED_BY(current_frame_lock_);
size_t presented_frames_ GUARDED_BY(current_frame_lock_) = 0u;
bool stopped_;
......
......@@ -1393,7 +1393,7 @@ TEST_P(WebMediaPlayerMSTest, RequestAnimationFrame) {
auto metadata = player_->GetVideoFramePresentationMetadata();
EXPECT_GT(metadata->presentation_time, base::TimeTicks());
EXPECT_GE(metadata->expected_presentation_time, metadata->presentation_time);
EXPECT_GE(metadata->expected_display_time, metadata->presentation_time);
testing::Mock::VerifyAndClearExpectations(this);
// Make sure multiple calls to RAF only result in one call per frame to OnRAF.
......
......@@ -9,7 +9,7 @@ dictionary VideoFrameMetadata {
required DOMHighResTimeStamp presentationTime;
// The time at which the user agent expects the frame to be visible.
required DOMHighResTimeStamp expectedPresentationTime;
required DOMHighResTimeStamp expectedDisplayTime;
// The visible width and height of the presented video frame.
required unsigned long width;
......@@ -17,7 +17,7 @@ dictionary VideoFrameMetadata {
// The presentation timestamp in seconds of the frame presented. May not be
// known to the compositor or exist in all cases.
double presentationTimestamp;
required double mediaTime;
// The elapsed time in seconds from submission of the encoded packet with
// the same presentationTimestamp as this frame to the decoder until the
......@@ -25,13 +25,13 @@ dictionary VideoFrameMetadata {
//
// In addition to decoding time, may include processing time. E.g., YUV
// conversion and/or staging into GPU backed memory.
double elapsedProcessingTime;
double processingDuration;
// A count of the number of frames submitted for composition. Allows clients
// to determine if frames were missed between VideoFrameRequestCallbacks.
//
// https://wiki.whatwg.org/wiki/Video_Metrics#presentedFrames
unsigned long presentedFrames;
required unsigned long presentedFrames;
// For video frames coming from either a local or remote source, this is
// the time at which the frame was captured by the camera. For a remote
......
......@@ -142,22 +142,22 @@ void VideoRequestAnimationFrameImpl::ExecuteFrameCallbacks(
time_converter.MonotonicTimeToZeroBasedDocumentTime(
frame_metadata->presentation_time)));
metadata->setExpectedPresentationTime(GetClampedTimeInMillis(
metadata->setExpectedDisplayTime(GetClampedTimeInMillis(
time_converter.MonotonicTimeToZeroBasedDocumentTime(
frame_metadata->expected_presentation_time)));
frame_metadata->expected_display_time)));
metadata->setPresentedFrames(frame_metadata->presented_frames);
metadata->setWidth(frame_metadata->width);
metadata->setHeight(frame_metadata->height);
metadata->setPresentationTimestamp(
frame_metadata->presentation_timestamp.InSecondsF());
metadata->setMediaTime(frame_metadata->media_time.InSecondsF());
base::TimeDelta elapsed;
base::TimeDelta processing_duration;
if (frame_metadata->metadata.GetTimeDelta(
media::VideoFrameMetadata::PROCESSING_TIME, &elapsed)) {
metadata->setElapsedProcessingTime(GetCoarseClampedTimeInSeconds(elapsed));
media::VideoFrameMetadata::PROCESSING_TIME, &processing_duration)) {
metadata->setProcessingDuration(
GetCoarseClampedTimeInSeconds(processing_duration));
}
base::TimeTicks capture_time;
......
......@@ -68,10 +68,10 @@ class MetadataHelper {
copy->presented_frames = metadata_.presented_frames;
copy->presentation_time = metadata_.presentation_time;
copy->expected_presentation_time = metadata_.expected_presentation_time;
copy->expected_display_time = metadata_.expected_display_time;
copy->width = metadata_.width;
copy->height = metadata_.height;
copy->presentation_timestamp = metadata_.presentation_timestamp;
copy->media_time = metadata_.media_time;
copy->metadata.MergeMetadataFrom(&(metadata_.metadata));
return copy;
......@@ -89,11 +89,11 @@ class MetadataHelper {
metadata_.presented_frames = 42;
metadata_.presentation_time =
now + base::TimeDelta::FromMillisecondsD(10.1234);
metadata_.expected_presentation_time =
metadata_.expected_display_time =
now + base::TimeDelta::FromMillisecondsD(26.3467);
metadata_.width = 320;
metadata_.height = 480;
metadata_.presentation_timestamp = base::TimeDelta::FromSecondsD(3.14);
metadata_.media_time = base::TimeDelta::FromSecondsD(3.14);
metadata_.metadata.SetTimeDelta(media::VideoFrameMetadata::PROCESSING_TIME,
base::TimeDelta::FromMillisecondsD(60.982));
metadata_.metadata.SetTimeTicks(
......@@ -133,8 +133,7 @@ class VideoRafParameterVerifierCallback
EXPECT_EQ(expected->presented_frames, metadata->presentedFrames());
EXPECT_EQ((unsigned int)expected->width, metadata->width());
EXPECT_EQ((unsigned int)expected->height, metadata->height());
EXPECT_EQ(expected->presentation_timestamp.InSecondsF(),
metadata->presentationTimestamp());
EXPECT_EQ(expected->media_time.InSecondsF(), metadata->mediaTime());
double rtp_timestamp;
EXPECT_TRUE(expected->metadata.GetDouble(
......@@ -144,9 +143,9 @@ class VideoRafParameterVerifierCallback
// Verify that values were correctly clamped.
VerifyTicksClamping(expected->presentation_time,
metadata->presentationTime(), "presentation_time");
VerifyTicksClamping(expected->expected_presentation_time,
metadata->expectedPresentationTime(),
"expected_presentation_time");
VerifyTicksClamping(expected->expected_display_time,
metadata->expectedDisplayTime(),
"expected_display_time");
base::TimeTicks capture_time;
EXPECT_TRUE(expected->metadata.GetTimeTicks(
......@@ -162,8 +161,8 @@ class VideoRafParameterVerifierCallback
EXPECT_TRUE(expected->metadata.GetTimeDelta(
media::VideoFrameMetadata::PROCESSING_TIME, &processing_time));
EXPECT_EQ(ClampElapsedProcessingTime(processing_time),
metadata->elapsedProcessingTime());
EXPECT_NE(processing_time.InSecondsF(), metadata->elapsedProcessingTime());
metadata->processingDuration());
EXPECT_NE(processing_time.InSecondsF(), metadata->processingDuration());
}
double last_now() { return now_; }
......
......@@ -110,13 +110,17 @@ VideoFrameMetadata* WebGLVideoTexture::VideoElementTargetVideoTexture(
// would need to save the current time as well as the presentation time.
current_frame_metadata_->setPresentationTime(
frame_metadata_ptr->timestamp.InMicrosecondsF());
current_frame_metadata_->setExpectedPresentationTime(
current_frame_metadata_->setExpectedDisplayTime(
frame_metadata_ptr->expected_timestamp.InMicrosecondsF());
current_frame_metadata_->setWidth(frame_metadata_ptr->visible_rect.width());
current_frame_metadata_->setHeight(frame_metadata_ptr->visible_rect.height());
current_frame_metadata_->setPresentationTimestamp(
current_frame_metadata_->setMediaTime(
frame_metadata_ptr->timestamp.InSecondsF());
// This is a required field. It is supposed to be monotonically increasing for
// video.requestAnimationFrame, but it isn't used yet for WebGLVideoTexture.
current_frame_metadata_->setPresentedFrames(0);
return current_frame_metadata_;
#endif // defined OS_ANDROID
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment