Commit eb436ddb authored by emircan, committed by Commit bot

Revert of Use webrtc::VideoFrame timestamp in RTCVideoEncoder (patchset #7 id:260001 of https://codereview.chromium.org/2205623002/ )

Reason for revert:
This CL caused regressions in BWE stats and HW encoder
performance. Reasons include:
1) Modifying a scoped_refptr<media::VideoFrame>'s timestamp causes problems
for other clients using the same media::VideoFrame.
2) WebRTC's RTP timestamp isn't suitable for use as the presentation
timestamp in the Mac and Win HW encoders.
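
To make reason 1 concrete, here is a minimal standalone sketch of the
shared-ownership hazard. It uses std::shared_ptr and a bare struct in place
of scoped_refptr and media::VideoFrame, so it is an illustration of the
failure mode, not Chromium code:

// Two consumers hold the same ref-counted frame, so an in-place timestamp
// rewrite by one consumer is visible to the other.
#include <chrono>
#include <iostream>
#include <memory>

struct Frame {
  std::chrono::microseconds timestamp;
};

int main() {
  auto frame = std::make_shared<Frame>(Frame{std::chrono::microseconds(1000)});
  std::shared_ptr<Frame> sink_a = frame;  // e.g. the HW encoder path
  std::shared_ptr<Frame> sink_b = frame;  // e.g. a local renderer

  // sink_a overwrites the capture timestamp with an RTP-style value...
  sink_a->timestamp = std::chrono::microseconds(90 * 1000);

  // ...and sink_b now sees the rewritten value instead of the capture time.
  std::cout << sink_b->timestamp.count() << " us\n";  // prints 90000, not 1000
}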

BUG=641230

Original issue's description:
> Use webrtc::VideoFrame timestamp in RTCVideoEncoder
>
> This CL fixes an input timestamp mismatch in RTCVideoEncoder, which
> broke the googAvgEncodeMs and googEncodeUsagePercent stats in
> webrtc-internals for hardware encoders.
> With this change, we start using the WebRTC-given timestamp() so that
> OveruseFrameDetector can match the timestamps and calculate the stats.
>
> BUG=597087
> TEST=googAvgEncodeMs and googEncodeUsagePercent work on Mac (H264) and
> veyron_jerry (VP8).
>
> Committed: https://crrev.com/e3195490a63d9545fb1bfe560aa21680ba0b5843
> Cr-Commit-Position: refs/heads/master@{#414589}
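
For background on the stats named above: OveruseFrameDetector computes encode
time by pairing a frame's capture event with its send event on the same
timestamp key, so a key mismatch silently drops every sample. A simplified,
hypothetical sketch of that pairing (EncodeTimeTracker and its methods are
invented names, not WebRTC's actual API):

#include <cstdint>
#include <iostream>
#include <map>

class EncodeTimeTracker {
 public:
  void FrameCaptured(uint32_t rtp_timestamp, int64_t now_ms) {
    capture_time_ms_[rtp_timestamp] = now_ms;
  }
  // Returns the encode duration, or -1 when the keys do not match -- the
  // failure mode that zeroed googAvgEncodeMs for HW encoders.
  int64_t FrameSent(uint32_t rtp_timestamp, int64_t now_ms) {
    auto it = capture_time_ms_.find(rtp_timestamp);
    if (it == capture_time_ms_.end())
      return -1;
    int64_t encode_ms = now_ms - it->second;
    capture_time_ms_.erase(it);
    return encode_ms;
  }

 private:
  std::map<uint32_t, int64_t> capture_time_ms_;
};

int main() {
  EncodeTimeTracker tracker;
  tracker.FrameCaptured(3000, 100);
  std::cout << tracker.FrameSent(3000, 112) << " ms\n";  // 12
  std::cout << tracker.FrameSent(9999, 120) << " ms\n";  // -1: mismatched key
}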

TBR=wuchengli@chromium.org,pbos@chromium.org,posciak@chromium.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=597087

Review-Url: https://codereview.chromium.org/2296273002
Cr-Commit-Position: refs/heads/master@{#415752}
parent 8809c442
@@ -32,9 +32,6 @@ namespace content {
 namespace {

-// Used for timestamp conversions.
-static const int64_t kMsToRtpTimestamp = 90;
-
 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
 // media::VideoCodecProfile.
 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
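
The constant removed above encoded the 90 kHz RTP video clock, i.e. 90 RTP
ticks per millisecond. A tiny self-contained example of the conversion:

#include <cstdint>
#include <iostream>

int main() {
  const int64_t kMsToRtpTimestamp = 90;  // RTP ticks per millisecond
  int64_t frame_interval_ms = 33;        // roughly one frame at 30 fps
  std::cout << frame_interval_ms * kMsToRtpTimestamp << " ticks\n";  // 2970
}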
@@ -477,25 +474,27 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
   output_buffers_free_count_--;

   // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
-  int64_t rtp_timestamp, capture_time_ms;
+  int64_t capture_time_us, capture_time_ms;
+  uint32_t rtp_timestamp;
   if (!timestamp.is_zero()) {
-    // Get RTP timestamp value.
-    rtp_timestamp = timestamp.ToInternalValue();
-    capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;
+    capture_time_us = timestamp.InMicroseconds();
+    capture_time_ms = timestamp.InMilliseconds();
   } else {
     // Fallback to the current time if encoder does not provide timestamp.
-    rtp_timestamp = rtc::TimeMicros() * kMsToRtpTimestamp /
-                    base::Time::kMicrosecondsPerMillisecond;
-    capture_time_ms =
-        rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond;
+    capture_time_us = rtc::TimeMicros();
+    capture_time_ms =
+        capture_time_us / base::Time::kMicrosecondsPerMillisecond;
   }
+  // RTP timestamp can wrap around. Get the lower 32 bits.
+  rtp_timestamp = static_cast<uint32_t>(
+      capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);

   webrtc::EncodedImage image(
       reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
       output_buffer->mapped_size());
   image._encodedWidth = input_visible_size_.width();
   image._encodedHeight = input_visible_size_.height();
-  image._timeStamp = static_cast<uint32_t>(rtp_timestamp);
+  image._timeStamp = rtp_timestamp;
   image.capture_time_ms_ = capture_time_ms;
   image._frameType =
       (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
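
The restored code above derives both output values from the capture time
rather than reusing WebRTC's RTP timestamp, truncating the tick count to
32 bits because the on-the-wire RTP timestamp wraps. A standalone sketch of
the same arithmetic, with base::Time's microseconds-per-millisecond constant
inlined:

#include <cstdint>
#include <iostream>

int main() {
  const int64_t kMicrosecondsPerMillisecond = 1000;
  int64_t capture_time_us = 16666667;  // ~16.7 s of capture time
  int64_t capture_time_ms = capture_time_us / kMicrosecondsPerMillisecond;
  uint32_t rtp_timestamp = static_cast<uint32_t>(
      capture_time_us * 90 / kMicrosecondsPerMillisecond);
  std::cout << capture_time_ms << " ms -> " << rtp_timestamp << " ticks\n";
  // Prints "16666 ms -> 1500000 ticks"; tick counts past 2^32 wrap modulo 2^32.
}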
@@ -572,13 +571,15 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() {
   }

   if (requires_copy) {
+    const base::TimeDelta timestamp =
+        frame ? frame->timestamp()
+              : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms());
     base::SharedMemory* input_buffer = input_buffers_[index];
     frame = media::VideoFrame::WrapExternalSharedMemory(
         media::PIXEL_FORMAT_I420, input_frame_coded_size_,
         gfx::Rect(input_visible_size_), input_visible_size_,
         reinterpret_cast<uint8_t*>(input_buffer->memory()),
-        input_buffer->mapped_size(), input_buffer->handle(), 0,
-        base::TimeDelta());
+        input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp);
     if (!frame.get()) {
       LogAndNotifyError(FROM_HERE, "failed to create frame",
                         media::VideoEncodeAccelerator::kPlatformFailureError);
@@ -610,9 +611,6 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() {
       return;
     }
   }
-  // Use the timestamp set from WebRTC and set it in 90 kHz.
-  frame->set_timestamp(
-      base::TimeDelta::FromInternalValue(next_frame->timestamp()));
   frame->AddDestructionObserver(media::BindToCurrentLoop(
       base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
   video_encoder_->Encode(frame, next_frame_keyframe);
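
The copy path restored above stamps the wrapped frame with the media frame's
own capture timestamp and only falls back to the webrtc::VideoFrame's NTP
time when no media frame timestamp is available. A simplified sketch of that
choice, where std::chrono and std::optional stand in for the real
base::TimeDelta and frame types:

#include <chrono>
#include <iostream>
#include <optional>

using TimeDelta = std::chrono::milliseconds;

// Prefer the media frame's capture timestamp; otherwise derive one from the
// WebRTC frame's NTP time in milliseconds.
TimeDelta ChooseTimestamp(std::optional<TimeDelta> media_frame_timestamp,
                          int64_t webrtc_ntp_time_ms) {
  return media_frame_timestamp ? *media_frame_timestamp
                               : TimeDelta(webrtc_ntp_time_ms);
}

int main() {
  std::cout << ChooseTimestamp(TimeDelta(42), 99999).count() << " ms\n";  // 42
  std::cout << ChooseTimestamp(std::nullopt, 99999).count() << " ms\n";  // 99999
}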
...
@@ -570,9 +570,6 @@ class H264Validator : public StreamValidator {
 void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
   h264_parser_.SetStream(stream, static_cast<off_t>(size));

-  // Run |frame_cb_| for only first nalu.
-  bool frame_cb_called = false;
-
   while (1) {
     H264NALU nalu;
     H264Parser::Result result;
@@ -593,14 +590,10 @@ void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
         keyframe = true;
       // fallthrough
       case H264NALU::kNonIDRSlice: {
-        // Stream may contain at most one frame.
         ASSERT_TRUE(seen_idr_);
         seen_sps_ = seen_pps_ = false;
-        if (!frame_cb_called) {
-          frame_cb_called = true;
-          if (!frame_cb_.Run(keyframe))
-            return;
-        }
+        if (!frame_cb_.Run(keyframe))
+          return;
         break;
       }
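
The net effect of the revert above is that the validator's frame callback
runs for every slice NALU in the buffer again, rather than only for the
first one. A self-contained sketch of the restored behavior, with
hypothetical types in place of the real H264Parser:

#include <functional>
#include <iostream>
#include <vector>

enum class NaluType { kSPS, kPPS, kIDRSlice, kNonIDRSlice };

void ProcessStream(const std::vector<NaluType>& nalus,
                   const std::function<bool(bool keyframe)>& frame_cb) {
  for (NaluType nalu : nalus) {
    bool keyframe = (nalu == NaluType::kIDRSlice);
    if (nalu == NaluType::kIDRSlice || nalu == NaluType::kNonIDRSlice) {
      if (!frame_cb(keyframe))  // restored: invoked once per slice NALU
        return;
    }
  }
}

int main() {
  int frames = 0;
  ProcessStream({NaluType::kSPS, NaluType::kPPS, NaluType::kIDRSlice,
                 NaluType::kNonIDRSlice},
                [&](bool keyframe) {
                  ++frames;
                  std::cout << (keyframe ? "key" : "delta") << " frame\n";
                  return true;
                });
  std::cout << frames << " callbacks\n";  // 2 with the revert, 1 before it
}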
@@ -1850,15 +1843,6 @@ INSTANTIATE_TEST_CASE_P(MultipleEncoders,
                         false,
                         false,
                         false)));

-#if defined(OS_MACOSX)
-INSTANTIATE_TEST_CASE_P(
-    VerifyTimestamp,
-    VideoEncodeAcceleratorTest,
-    ::testing::Values(
-        std::make_tuple(1, false, 0, false, false, false, false, false, true)));
-#endif  // defined(OS_MACOSX)
-
 #if defined(OS_WIN)
 INSTANTIATE_TEST_CASE_P(
     ForceBitrate,
...