Commit 4fedb7de authored by Emircan Uysaler's avatar Emircan Uysaler Committed by Commit Bot

Preserve render_time_ms() as capture_time_ms_

After recent changes, WebRTC expects capture_time_ms_ to be the same as the
render_time_ms() of the EncodedImage passed for encoding. Instead of deriving a
value from the current time, we make a best effort to preserve the original
timestamp.

Bug: 801327
Change-Id: I66f80f059da0c96d6e052ed105c86e9269d0e1e5
Reviewed-on: https://chromium-review.googlesource.com/893425
Reviewed-by: Christian Fremerey <chfremer@chromium.org>
Commit-Queue: Emircan Uysaler <emircan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#533031}
parent b3a47fa2
...@@ -42,10 +42,15 @@ namespace content { ...@@ -42,10 +42,15 @@ namespace content {
namespace { namespace {
struct RTCTimestamps { struct RTCTimestamps {
RTCTimestamps(const base::TimeDelta& media_timestamp, int32_t rtp_timestamp) RTCTimestamps(const base::TimeDelta& media_timestamp,
: media_timestamp_(media_timestamp), rtp_timestamp(rtp_timestamp) {} int32_t rtp_timestamp,
int64_t capture_time_ms)
: media_timestamp_(media_timestamp),
rtp_timestamp(rtp_timestamp),
capture_time_ms(capture_time_ms) {}
const base::TimeDelta media_timestamp_; const base::TimeDelta media_timestamp_;
const int32_t rtp_timestamp; const int32_t rtp_timestamp;
const int64_t capture_time_ms;
}; };
webrtc::VideoCodecType ProfileToWebRtcVideoCodecType( webrtc::VideoCodecType ProfileToWebRtcVideoCodecType(
...@@ -255,10 +260,6 @@ class RTCVideoEncoder::Impl ...@@ -255,10 +260,6 @@ class RTCVideoEncoder::Impl
// 15 bits running index of the VP8 frames. See VP8 RTP spec for details. // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
uint16_t picture_id_; uint16_t picture_id_;
// |capture_time_ms_| field of the last returned webrtc::EncodedImage from
// BitstreamBufferReady().
int64_t last_capture_time_ms_;
// webrtc::VideoEncoder encode complete callback. // webrtc::VideoEncoder encode complete callback.
webrtc::EncodedImageCallback* encoded_image_callback_; webrtc::EncodedImageCallback* encoded_image_callback_;
...@@ -287,7 +288,6 @@ RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, ...@@ -287,7 +288,6 @@ RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories,
input_next_frame_(nullptr), input_next_frame_(nullptr),
input_next_frame_keyframe_(false), input_next_frame_keyframe_(false),
output_buffers_free_count_(0), output_buffers_free_count_(0),
last_capture_time_ms_(-1),
encoded_image_callback_(nullptr), encoded_image_callback_(nullptr),
video_codec_type_(video_codec_type), video_codec_type_(video_codec_type),
status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
...@@ -500,23 +500,17 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, ...@@ -500,23 +500,17 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
} }
output_buffers_free_count_--; output_buffers_free_count_--;
// Derive the capture time in ms from system clock. Make sure that it is // Find RTP and capture timestamps by going through |pending_timestamps_|.
// greater than the last. // Derive it from current time otherwise.
const int64_t capture_time_us = rtc::TimeMicros();
int64_t capture_time_ms =
capture_time_us / base::Time::kMicrosecondsPerMillisecond;
capture_time_ms = std::max(capture_time_ms, last_capture_time_ms_ + 1);
last_capture_time_ms_ = capture_time_ms;
// Find RTP timestamp by going through |pending_timestamps_|. Derive it from
// capture time otherwise.
base::Optional<uint32_t> rtp_timestamp; base::Optional<uint32_t> rtp_timestamp;
base::Optional<int64_t> capture_timestamp_ms;
if (!failed_timestamp_match_) { if (!failed_timestamp_match_) {
// Pop timestamps until we have a match. // Pop timestamps until we have a match.
while (!pending_timestamps_.empty()) { while (!pending_timestamps_.empty()) {
const auto& front_timestamps = pending_timestamps_.front(); const auto& front_timestamps = pending_timestamps_.front();
if (front_timestamps.media_timestamp_ == timestamp) { if (front_timestamps.media_timestamp_ == timestamp) {
rtp_timestamp = front_timestamps.rtp_timestamp; rtp_timestamp = front_timestamps.rtp_timestamp;
capture_timestamp_ms = front_timestamps.capture_time_ms;
pending_timestamps_.pop_front(); pending_timestamps_.pop_front();
break; break;
} }
...@@ -524,12 +518,14 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, ...@@ -524,12 +518,14 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
} }
DCHECK(rtp_timestamp.has_value()); DCHECK(rtp_timestamp.has_value());
} }
if (!rtp_timestamp.has_value()) { if (!rtp_timestamp.has_value() || !capture_timestamp_ms.has_value()) {
failed_timestamp_match_ = true; failed_timestamp_match_ = true;
pending_timestamps_.clear(); pending_timestamps_.clear();
const int64_t current_time_ms =
rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond;
// RTP timestamp can wrap around. Get the lower 32 bits. // RTP timestamp can wrap around. Get the lower 32 bits.
rtp_timestamp = static_cast<uint32_t>( rtp_timestamp = static_cast<uint32_t>(current_time_ms * 90);
capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); capture_timestamp_ms = current_time_ms;
} }
webrtc::EncodedImage image( webrtc::EncodedImage image(
...@@ -538,7 +534,7 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, ...@@ -538,7 +534,7 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
image._encodedWidth = input_visible_size_.width(); image._encodedWidth = input_visible_size_.width();
image._encodedHeight = input_visible_size_.height(); image._encodedHeight = input_visible_size_.height();
image._timeStamp = rtp_timestamp.value(); image._timeStamp = rtp_timestamp.value();
image.capture_time_ms_ = capture_time_ms; image.capture_time_ms_ = capture_timestamp_ms.value();
image._frameType = image._frameType =
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
image._completeFrame = true; image._completeFrame = true;
...@@ -670,7 +666,8 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() { ...@@ -670,7 +666,8 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() {
return entry.media_timestamp_ == frame->timestamp(); return entry.media_timestamp_ == frame->timestamp();
}) == pending_timestamps_.end()); }) == pending_timestamps_.end());
pending_timestamps_.emplace_back(frame->timestamp(), pending_timestamps_.emplace_back(frame->timestamp(),
next_frame->timestamp()); next_frame->timestamp(),
next_frame->render_time_ms());
} }
video_encoder_->Encode(frame, next_frame_keyframe); video_encoder_->Encode(frame, next_frame_keyframe);
input_buffers_free_.pop_back(); input_buffers_free_.pop_back();
......
...@@ -185,11 +185,13 @@ class RTCVideoEncoderTest ...@@ -185,11 +185,13 @@ class RTCVideoEncoderTest
} }
void VerifyTimestamp(uint32_t rtp_timestamp, void VerifyTimestamp(uint32_t rtp_timestamp,
int64_t capture_time_ms,
const webrtc::EncodedImage& encoded_image, const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info, const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) { const webrtc::RTPFragmentationHeader* fragmentation) {
DVLOG(3) << __func__; DVLOG(3) << __func__;
EXPECT_EQ(rtp_timestamp, encoded_image._timeStamp); EXPECT_EQ(rtp_timestamp, encoded_image._timeStamp);
EXPECT_EQ(capture_time_ms, encoded_image.capture_time_ms_);
} }
protected: protected:
...@@ -299,9 +301,10 @@ TEST_F(RTCVideoEncoderTest, PreserveTimestamps) { ...@@ -299,9 +301,10 @@ TEST_F(RTCVideoEncoderTest, PreserveTimestamps) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, rtc_encoder_->InitEncode(&codec, 1, 12345)); EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, rtc_encoder_->InitEncode(&codec, 1, 12345));
const uint32_t rtp_timestamp = 1234567; const uint32_t rtp_timestamp = 1234567;
const uint32_t capture_time_ms = 3456789;
RegisterEncodeCompleteCallback( RegisterEncodeCompleteCallback(
base::Bind(&RTCVideoEncoderTest::VerifyTimestamp, base::Unretained(this), base::Bind(&RTCVideoEncoderTest::VerifyTimestamp, base::Unretained(this),
rtp_timestamp)); rtp_timestamp, capture_time_ms));
EXPECT_CALL(*mock_vea_, Encode(_, _)) EXPECT_CALL(*mock_vea_, Encode(_, _))
.WillOnce(Invoke(this, &RTCVideoEncoderTest::ReturnFrameWithTimeStamp)); .WillOnce(Invoke(this, &RTCVideoEncoderTest::ReturnFrameWithTimeStamp));
...@@ -311,6 +314,7 @@ TEST_F(RTCVideoEncoderTest, PreserveTimestamps) { ...@@ -311,6 +314,7 @@ TEST_F(RTCVideoEncoderTest, PreserveTimestamps) {
std::vector<webrtc::FrameType> frame_types; std::vector<webrtc::FrameType> frame_types;
webrtc::VideoFrame rtc_frame(buffer, rtp_timestamp, 0, webrtc::VideoFrame rtc_frame(buffer, rtp_timestamp, 0,
webrtc::kVideoRotation_0); webrtc::kVideoRotation_0);
rtc_frame.set_timestamp_us(capture_time_ms * rtc::kNumMicrosecsPerMillisec);
// We need to set ntp_time_ms because it will be used to derive // We need to set ntp_time_ms because it will be used to derive
// media::VideoFrame timestamp. // media::VideoFrame timestamp.
rtc_frame.set_ntp_time_ms(4567891); rtc_frame.set_ntp_time_ms(4567891);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment