Commit 66be5149 authored by Johannes Kron, committed by Commit Bot

[video-raf] Add receive time to video.rAF metadata

Add receive time to video.rAF for remote video sources
according to a recent update of the specification

https://github.com/WICG/video-raf/pull/17

Bug: chromium:1011581
Change-Id: I7bd7450ff50e5c69f5e85ca3f1182d4d5f264848
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2073940
Commit-Queue: Johannes Kron <kron@google.com>
Reviewed-by: Mounir Lamouri <mlamouri@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Thomas Guilbert <tguilbert@chromium.org>
Cr-Commit-Position: refs/heads/master@{#746106}
parent 467b6a6c
...@@ -177,6 +177,12 @@ class MEDIA_EXPORT VideoFrameMetadata { ...@@ -177,6 +177,12 @@ class MEDIA_EXPORT VideoFrameMetadata {
// https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource
RTP_TIMESTAMP, RTP_TIMESTAMP,
// For video frames coming from a remote source, this is the time the
// encoded frame was received by the platform, i.e., the time at
// which the last packet belonging to this frame was received over the
// network.
RECEIVE_TIME,
NUM_KEYS NUM_KEYS
}; };
......
...@@ -298,7 +298,18 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame( ...@@ -298,7 +298,18 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
incoming_frame.processing_time()->Elapsed().us())); incoming_frame.processing_time()->Elapsed().us()));
} }
// Set capture time to arrival of last packet. // Set capture time to the NTP time, which is the estimated capture time
// converted to the local clock.
if (incoming_frame.ntp_time_ms() != 0) {
const base::TimeTicks capture_time =
base::TimeTicks() +
base::TimeDelta::FromMilliseconds(incoming_frame.ntp_time_ms()) +
time_diff_;
video_frame->metadata()->SetTimeTicks(
media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_time);
}
// Set receive time to arrival of last packet.
if (!incoming_frame.packet_infos().empty()) { if (!incoming_frame.packet_infos().empty()) {
int64_t last_packet_arrival_ms = int64_t last_packet_arrival_ms =
std::max_element( std::max_element(
...@@ -308,12 +319,11 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame( ...@@ -308,12 +319,11 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
return a.receive_time_ms() < b.receive_time_ms(); return a.receive_time_ms() < b.receive_time_ms();
}) })
->receive_time_ms(); ->receive_time_ms();
const base::TimeTicks capture_time = const base::TimeTicks receive_time =
base::TimeTicks() + base::TimeTicks() +
base::TimeDelta::FromMilliseconds(last_packet_arrival_ms) + time_diff_; base::TimeDelta::FromMilliseconds(last_packet_arrival_ms) + time_diff_;
video_frame->metadata()->SetTimeTicks( video_frame->metadata()->SetTimeTicks(
media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_time); media::VideoFrameMetadata::RECEIVE_TIME, receive_time);
} }
// Use our computed render time as estimated capture time. If timestamp_us() // Use our computed render time as estimated capture time. If timestamp_us()
......
...@@ -324,14 +324,21 @@ TEST_F(MediaStreamRemoteVideoSourceTest, ...@@ -324,14 +324,21 @@ TEST_F(MediaStreamRemoteVideoSourceTest,
webrtc::Timestamp::Millis(rtc::TimeMillis()); webrtc::Timestamp::Millis(rtc::TimeMillis());
const webrtc::Timestamp kProcessingStart = const webrtc::Timestamp kProcessingStart =
kProcessingFinish - webrtc::TimeDelta::Millis(1.0e3 * kProcessingTime); kProcessingFinish - webrtc::TimeDelta::Millis(1.0e3 * kProcessingTime);
const webrtc::Timestamp kCaptureTime =
kProcessingStart - webrtc::TimeDelta::Millis(20.0);
// Expected capture time in Chromium epoch.
base::TimeTicks kExpectedCaptureTime =
base::TimeTicks() + base::TimeDelta::FromMilliseconds(kCaptureTime.ms()) +
time_diff();
webrtc::RtpPacketInfos::vector_type packet_infos; webrtc::RtpPacketInfos::vector_type packet_infos;
for (int i = 0; i < 4; ++i) { for (int i = 0; i < 4; ++i) {
packet_infos.emplace_back(kSsrc, kCsrcs, kRtpTimestamp, absl::nullopt, packet_infos.emplace_back(kSsrc, kCsrcs, kRtpTimestamp, absl::nullopt,
absl::nullopt, kProcessingStart.ms() - 100 + i); absl::nullopt, kProcessingStart.ms() - 100 + i);
} }
// Capture time should be the same as the last arrival time. // Expected receive time should be the same as the last arrival time, in
base::TimeTicks kExpectedCaptureTime = // Chromium epoch.
base::TimeTicks kExpectedReceiveTime =
base::TimeTicks() + base::TimeTicks() +
base::TimeDelta::FromMilliseconds(kProcessingStart.ms() - 100 + 3) + base::TimeDelta::FromMilliseconds(kProcessingStart.ms() - 100 + 3) +
time_diff(); time_diff();
...@@ -340,6 +347,7 @@ TEST_F(MediaStreamRemoteVideoSourceTest, ...@@ -340,6 +347,7 @@ TEST_F(MediaStreamRemoteVideoSourceTest,
webrtc::VideoFrame::Builder() webrtc::VideoFrame::Builder()
.set_video_frame_buffer(buffer) .set_video_frame_buffer(buffer)
.set_timestamp_rtp(kRtpTimestamp) .set_timestamp_rtp(kRtpTimestamp)
.set_ntp_time_ms(kCaptureTime.ms())
.set_packet_infos(webrtc::RtpPacketInfos(packet_infos)) .set_packet_infos(webrtc::RtpPacketInfos(packet_infos))
.build(); .build();
...@@ -362,6 +370,12 @@ TEST_F(MediaStreamRemoteVideoSourceTest, ...@@ -362,6 +370,12 @@ TEST_F(MediaStreamRemoteVideoSourceTest,
EXPECT_NEAR((capture_time - kExpectedCaptureTime).InMillisecondsF(), 0.0f, EXPECT_NEAR((capture_time - kExpectedCaptureTime).InMillisecondsF(), 0.0f,
kChromiumWebRtcMaxTimeDiffMs); kChromiumWebRtcMaxTimeDiffMs);
base::TimeTicks receive_time;
EXPECT_TRUE(output_frame->metadata()->GetTimeTicks(
media::VideoFrameMetadata::RECEIVE_TIME, &receive_time));
EXPECT_NEAR((receive_time - kExpectedReceiveTime).InMillisecondsF(), 0.0f,
kChromiumWebRtcMaxTimeDiffMs);
double rtp_timestamp; double rtp_timestamp;
EXPECT_TRUE(output_frame->metadata()->GetDouble( EXPECT_TRUE(output_frame->metadata()->GetDouble(
media::VideoFrameMetadata::RTP_TIMESTAMP, &rtp_timestamp)); media::VideoFrameMetadata::RTP_TIMESTAMP, &rtp_timestamp));
......
...@@ -33,10 +33,18 @@ dictionary VideoFrameMetadata { ...@@ -33,10 +33,18 @@ dictionary VideoFrameMetadata {
// https://wiki.whatwg.org/wiki/Video_Metrics#presentedFrames // https://wiki.whatwg.org/wiki/Video_Metrics#presentedFrames
unsigned long presentedFrames; unsigned long presentedFrames;
// For video frames coming from a local device like a camera, the time at // For video frames coming from either a local or remote source, this is
// which the frame was received from the device. // the time at which the frame was captured by the camera. For a remote
// source, the capture time is estimated using clock synchronization and
// RTCP sender reports to convert RTP timestamps to capture time as
// specified in RFC 3550 Section 6.4.1.
DOMHighResTimeStamp captureTime; DOMHighResTimeStamp captureTime;
// For video frames coming from a remote source, this is the time the
// encoded frame was received by the platform, i.e., the time at which the
// last packet belonging to this frame was received over the network.
DOMHighResTimeStamp receiveTime;
// The RTP timestamp associated with this video frame. // The RTP timestamp associated with this video frame.
// //
// https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource
......
...@@ -167,6 +167,13 @@ void VideoRequestAnimationFrameImpl::ExecuteFrameCallbacks( ...@@ -167,6 +167,13 @@ void VideoRequestAnimationFrameImpl::ExecuteFrameCallbacks(
time_converter.MonotonicTimeToZeroBasedDocumentTime(capture_time))); time_converter.MonotonicTimeToZeroBasedDocumentTime(capture_time)));
} }
base::TimeTicks receive_time;
if (frame_metadata->metadata.GetTimeTicks(
media::VideoFrameMetadata::RECEIVE_TIME, &receive_time)) {
metadata->setReceiveTime(GetClampedTimeInMillis(
time_converter.MonotonicTimeToZeroBasedDocumentTime(receive_time)));
}
double rtp_timestamp; double rtp_timestamp;
if (frame_metadata->metadata.GetDouble( if (frame_metadata->metadata.GetDouble(
media::VideoFrameMetadata::RTP_TIMESTAMP, &rtp_timestamp)) { media::VideoFrameMetadata::RTP_TIMESTAMP, &rtp_timestamp)) {
......
...@@ -102,6 +102,9 @@ class MetadataHelper { ...@@ -102,6 +102,9 @@ class MetadataHelper {
metadata_.metadata.SetTimeTicks( metadata_.metadata.SetTimeTicks(
media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, media::VideoFrameMetadata::CAPTURE_BEGIN_TIME,
now + base::TimeDelta::FromMillisecondsD(5.6785)); now + base::TimeDelta::FromMillisecondsD(5.6785));
metadata_.metadata.SetTimeTicks(
media::VideoFrameMetadata::RECEIVE_TIME,
now + base::TimeDelta::FromMillisecondsD(17.1234));
metadata_.metadata.SetDouble(media::VideoFrameMetadata::RTP_TIMESTAMP, metadata_.metadata.SetDouble(media::VideoFrameMetadata::RTP_TIMESTAMP,
12345); 12345);
...@@ -152,6 +155,11 @@ class VideoRafParameterVerifierCallback ...@@ -152,6 +155,11 @@ class VideoRafParameterVerifierCallback
media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, &capture_time)); media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, &capture_time));
VerifyTicksClamping(capture_time, metadata->captureTime(), "capture_time"); VerifyTicksClamping(capture_time, metadata->captureTime(), "capture_time");
base::TimeTicks receive_time;
EXPECT_TRUE(expected->metadata.GetTimeTicks(
media::VideoFrameMetadata::RECEIVE_TIME, &receive_time));
VerifyTicksClamping(receive_time, metadata->receiveTime(), "receive_time");
base::TimeDelta processing_time; base::TimeDelta processing_time;
EXPECT_TRUE(expected->metadata.GetTimeDelta( EXPECT_TRUE(expected->metadata.GetTimeDelta(
media::VideoFrameMetadata::PROCESSING_TIME, &processing_time)); media::VideoFrameMetadata::PROCESSING_TIME, &processing_time));
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment