Commit ee4a4236 authored by Dan Sanders, committed by Commit Bot

Add RTCVideoDecoderAdapter, which can use any media::VideoDecoder.

This is the main body of code required to support MojoVideoDecoder for
hardware decode of WebRTC streams. It tries to be as similar as possible
to RTCVideoDecoder so that the transition can be managed gradually.

The implementation actually supports any media::VideoDecoder, in
particular FFmpegVideoDecoder works well for testing.

Bug: 857111
Change-Id: I0a4dc37a0a133b4c112a55a215aba8058cca598b
Reviewed-on: https://chromium-review.googlesource.com/1117837
Commit-Queue: Dan Sanders <sandersd@chromium.org>
Reviewed-by: Henrik Boström <hbos@chromium.org>
Cr-Commit-Position: refs/heads/master@{#582291}
parent 8526d438
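As the commit message notes, the adapter is decoder-agnostic: whatever media::VideoDecoder the CreateVideoDecoderCB returns is what gets driven on the media thread. A minimal caller-side sketch of that wiring is below; the helper names (CreateDecoderForTest, MaybeCreateHardwareDecoder) are hypothetical and the real factory integration is not part of this change.

// Illustration only. The callback type is defined in rtc_video_decoder_adapter.h
// as base::RepeatingCallback<std::unique_ptr<media::VideoDecoder>(media::MediaLog*)>.
std::unique_ptr<media::VideoDecoder> CreateDecoderForTest(
    media::MediaLog* media_log) {
  // Return any media::VideoDecoder here, e.g. a Mojo-backed decoder for
  // hardware decode in production or FFmpegVideoDecoder for local testing.
  return nullptr;  // Placeholder: substitute a real decoder.
}

std::unique_ptr<webrtc::VideoDecoder> MaybeCreateHardwareDecoder(
    webrtc::VideoCodecType type,
    media::GpuVideoAcceleratorFactories* gpu_factories) {
  // Create() returns nullptr for unsupported codecs or failed initialization;
  // the caller keeps its existing software path in that case.
  return content::RTCVideoDecoderAdapter::Create(
      type, gpu_factories, base::BindRepeating(&CreateDecoderForTest));
}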
content/renderer/BUILD.gn

@@ -407,6 +407,8 @@ target(link_target_type, "renderer") {
     "media/webrtc/rtc_stats.h",
     "media/webrtc/rtc_video_decoder.cc",
     "media/webrtc/rtc_video_decoder.h",
+    "media/webrtc/rtc_video_decoder_adapter.cc",
+    "media/webrtc/rtc_video_decoder_adapter.h",
     "media/webrtc/rtc_video_decoder_factory.cc",
     "media/webrtc/rtc_video_decoder_factory.h",
     "media/webrtc/rtc_video_encoder.cc",

content/renderer/media/webrtc/rtc_video_decoder_adapter.cc (new file)
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/media/webrtc/rtc_video_decoder_adapter.h"
#include <algorithm>
#include "base/bind.h"
#include "base/command_line.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "content/renderer/media/render_media_log.h"
#include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
#include "media/base/media_log.h"
#include "media/base/media_util.h"
#include "media/base/video_types.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "third_party/webrtc/api/video/video_frame.h"
#include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h"
#include "third_party/webrtc/rtc_base/bind.h"
#include "third_party/webrtc/rtc_base/refcount.h"
#include "third_party/webrtc/rtc_base/refcountedobject.h"
#if defined(OS_WIN)
#include "base/command_line.h"
#include "base/win/windows_version.h"
#include "content/public/common/content_switches.h"
#endif // defined(OS_WIN)
namespace content {
namespace {
// Any reasonable size; it will be overridden by the decoder anyway.
//
// TODO(sandersd): DXVA VDA does not compute a visible rect itself; we should
// not enable RTCVideoDecoderAdapter on Windows until that is fixed.
// https://crbug.com/869660.
const gfx::Size kDefaultSize(640, 480);
// Assumed pixel format of the encoded content. WebRTC doesn't tell us, and in
// practice the decoders ignore it. We're going with generic 4:2:0.
const media::VideoPixelFormat kDefaultPixelFormat = media::PIXEL_FORMAT_I420;
// Maximum number of buffers that we will queue in |pending_buffers_|.
const int32_t kMaxPendingBuffers = 8;
// Maximum number of timestamps that will be maintained in |decode_timestamps_|.
// Really only needs to be a bit larger than the maximum reorder distance (which
// is presumably 0 for WebRTC), but being larger doesn't hurt much.
const int32_t kMaxDecodeHistory = 32;
// Maximum number of consecutive frames that can fail to decode before
// requesting fallback to software decode.
const int32_t kMaxConsecutiveErrors = 5;
// Map webrtc::VideoCodecType to media::VideoCodec.
media::VideoCodec ToVideoCodec(webrtc::VideoCodecType video_codec_type) {
switch (video_codec_type) {
case webrtc::kVideoCodecVP8:
return media::kCodecVP8;
case webrtc::kVideoCodecVP9:
return media::kCodecVP9;
case webrtc::kVideoCodecH264:
return media::kCodecH264;
default:
return media::kUnknownVideoCodec;
}
}
// Map webrtc::VideoCodecType to a guess for media::VideoCodecProfile.
media::VideoCodecProfile GuessVideoCodecProfile(
webrtc::VideoCodecType video_codec_type) {
switch (video_codec_type) {
case webrtc::kVideoCodecVP8:
return media::VP8PROFILE_ANY;
case webrtc::kVideoCodecVP9:
return media::VP9PROFILE_PROFILE0;
case webrtc::kVideoCodecH264:
return media::H264PROFILE_BASELINE;
default:
return media::VIDEO_CODEC_PROFILE_UNKNOWN;
}
}
void FinishWait(base::WaitableEvent* waiter, bool* result_out, bool result) {
DVLOG(3) << __func__ << "(" << result << ")";
*result_out = result;
waiter->Signal();
}
} // namespace
// static
std::unique_ptr<RTCVideoDecoderAdapter> RTCVideoDecoderAdapter::Create(
webrtc::VideoCodecType video_codec_type,
media::GpuVideoAcceleratorFactories* gpu_factories,
CreateVideoDecoderCB create_video_decoder_cb) {
DVLOG(1) << __func__ << "(" << video_codec_type << ")";
#if defined(OS_WIN)
// Do not use hardware decoding for H.264 on Win7, due to high latency.
// See https://crbug.com/webrtc/5717.
if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kEnableWin7WebRtcHWH264Decoding) &&
video_codec_type == webrtc::kVideoCodecH264 &&
base::win::GetVersion() == base::win::VERSION_WIN7) {
DVLOG(1) << "H.264 HW decoding is not supported on Win7";
return nullptr;
}
#endif // defined(OS_WIN)
// Short circuit known-unsupported codecs.
if (ToVideoCodec(video_codec_type) == media::kUnknownVideoCodec)
return nullptr;
std::unique_ptr<RTCVideoDecoderAdapter> rtc_video_decoder_adapter =
base::WrapUnique(new RTCVideoDecoderAdapter(
gpu_factories->GetTaskRunner(), std::move(create_video_decoder_cb),
video_codec_type));
// Synchronously verify that the decoder can be initialized.
if (!rtc_video_decoder_adapter->InitializeSync()) {
DeleteSoonOnMediaThread(std::move(rtc_video_decoder_adapter),
gpu_factories);
return nullptr;
}
return rtc_video_decoder_adapter;
}
// static
void RTCVideoDecoderAdapter::DeleteSoonOnMediaThread(
std::unique_ptr<webrtc::VideoDecoder> rtc_video_decoder_adapter,
media::GpuVideoAcceleratorFactories* gpu_factories) {
gpu_factories->GetTaskRunner()->DeleteSoon(
FROM_HERE, std::move(rtc_video_decoder_adapter));
}
RTCVideoDecoderAdapter::RTCVideoDecoderAdapter(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
CreateVideoDecoderCB create_video_decoder_cb,
webrtc::VideoCodecType video_codec_type)
: media_task_runner_(std::move(media_task_runner)),
create_video_decoder_cb_(std::move(create_video_decoder_cb)),
video_codec_type_(video_codec_type),
weak_this_factory_(this) {
DVLOG(1) << __func__;
DETACH_FROM_THREAD(decoding_thread_checker_);
weak_this_ = weak_this_factory_.GetWeakPtr();
}
RTCVideoDecoderAdapter::~RTCVideoDecoderAdapter() {
DVLOG(1) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
}
bool RTCVideoDecoderAdapter::InitializeSync() {
DVLOG(3) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(worker_thread_checker_);
bool result = false;
base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
media::VideoDecoder::InitCB init_cb =
base::BindRepeating(&FinishWait, &waiter, &result);
if (media_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&RTCVideoDecoderAdapter::InitializeOnMediaThread,
base::Unretained(this), std::move(init_cb)))) {
waiter.Wait();
}
return result;
}
int32_t RTCVideoDecoderAdapter::InitDecode(
const webrtc::VideoCodec* codec_settings,
int32_t number_of_cores) {
DVLOG(1) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(decoding_thread_checker_);
DCHECK_EQ(video_codec_type_, codec_settings->codecType);
base::AutoLock auto_lock(lock_);
UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", !has_error_);
return has_error_ ? WEBRTC_VIDEO_CODEC_UNINITIALIZED : WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoDecoderAdapter::Decode(
const webrtc::EncodedImage& input_image,
bool missing_frames,
const webrtc::CodecSpecificInfo* codec_specific_info,
int64_t render_time_ms) {
DVLOG(2) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(decoding_thread_checker_);
#if defined(OS_WIN)
// Hardware VP9 decoders don't handle more than one spatial layer. Fall back
// to software decoding. See https://crbug.com/webrtc/9304.
if (codec_specific_info &&
codec_specific_info->codecType == webrtc::kVideoCodecVP9 &&
codec_specific_info->codecSpecific.VP9.ss_data_available &&
codec_specific_info->codecSpecific.VP9.num_spatial_layers > 1) {
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
#endif // defined(OS_WIN)
if (missing_frames || !input_image._completeFrame) {
DVLOG(2) << "Missing or incomplete frames";
// We probably can't handle broken frames. Request a key frame.
return WEBRTC_VIDEO_CODEC_ERROR;
}
// Convert to media::DecoderBuffer.
// TODO(sandersd): What is |render_time_ms|?
scoped_refptr<media::DecoderBuffer> buffer =
media::DecoderBuffer::CopyFrom(input_image._buffer, input_image._length);
buffer->set_timestamp(
base::TimeDelta::FromMicroseconds(input_image._timeStamp));
// Queue for decoding.
{
base::AutoLock auto_lock(lock_);
if (has_error_)
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
if (pending_buffers_.size() >= kMaxPendingBuffers) {
// We are severely behind. Drop pending buffers and request a keyframe to
// catch up as quickly as possible.
DVLOG(2) << "Pending buffers overflow";
if (++consecutive_error_count_ > kMaxConsecutiveErrors) {
pending_buffers_.clear();
decode_timestamps_.clear();
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
pending_buffers_.clear();
return WEBRTC_VIDEO_CODEC_ERROR;
}
pending_buffers_.push_back(std::move(buffer));
}
media_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&RTCVideoDecoderAdapter::DecodeOnMediaThread, weak_this_));
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoDecoderAdapter::RegisterDecodeCompleteCallback(
webrtc::DecodedImageCallback* callback) {
DVLOG(2) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(decoding_thread_checker_);
DCHECK(callback);
base::AutoLock auto_lock(lock_);
decode_complete_callback_ = callback;
return has_error_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
: WEBRTC_VIDEO_CODEC_OK;
}
int32_t RTCVideoDecoderAdapter::Release() {
DVLOG(1) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(worker_thread_checker_);
base::AutoLock auto_lock(lock_);
pending_buffers_.clear();
decode_timestamps_.clear();
return has_error_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
: WEBRTC_VIDEO_CODEC_OK;
}
const char* RTCVideoDecoderAdapter::ImplementationName() const {
return "ExternalDecoder";
}
void RTCVideoDecoderAdapter::InitializeOnMediaThread(
media::VideoDecoder::InitCB init_cb) {
DVLOG(3) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
// TODO(sandersd): Plumb a real log sink here so that we can contribute to the
// media-internals UI. The current log just discards all messages.
media_log_ = std::make_unique<media::MediaLog>();
video_decoder_ = create_video_decoder_cb_.Run(media_log_.get());
if (!video_decoder_) {
media_task_runner_->PostTask(FROM_HERE,
base::BindRepeating(init_cb, false));
return;
}
// We don't know much about the media that is coming.
media::VideoDecoderConfig config(
ToVideoCodec(video_codec_type_),
GuessVideoCodecProfile(video_codec_type_), kDefaultPixelFormat,
media::COLOR_SPACE_UNSPECIFIED, media::VIDEO_ROTATION_0, kDefaultSize,
gfx::Rect(kDefaultSize), kDefaultSize, media::EmptyExtraData(),
media::Unencrypted());
// In practice this is ignored by hardware decoders.
bool low_delay = true;
// Encryption is not supported.
media::CdmContext* cdm_context = nullptr;
media::VideoDecoder::WaitingForDecryptionKeyCB waiting_cb;
media::VideoDecoder::OutputCB output_cb =
base::BindRepeating(&RTCVideoDecoderAdapter::OnOutput, weak_this_);
video_decoder_->Initialize(config, low_delay, cdm_context, std::move(init_cb),
std::move(output_cb), std::move(waiting_cb));
}
void RTCVideoDecoderAdapter::DecodeOnMediaThread() {
DVLOG(4) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
int max_decode_requests = video_decoder_->GetMaxDecodeRequests();
while (outstanding_decode_requests_ < max_decode_requests) {
scoped_refptr<media::DecoderBuffer> buffer;
{
base::AutoLock auto_lock(lock_);
// Take the first pending buffer.
if (pending_buffers_.empty())
return;
buffer = pending_buffers_.front();
pending_buffers_.pop_front();
// Record the timestamp.
while (decode_timestamps_.size() >= kMaxDecodeHistory)
decode_timestamps_.pop_front();
decode_timestamps_.push_back(buffer->timestamp());
}
// Submit for decoding.
outstanding_decode_requests_++;
video_decoder_->Decode(
std::move(buffer),
base::BindRepeating(&RTCVideoDecoderAdapter::OnDecodeDone, weak_this_));
}
}
void RTCVideoDecoderAdapter::OnDecodeDone(media::DecodeStatus status) {
DVLOG(3) << __func__ << "(" << status << ")";
DCHECK(media_task_runner_->BelongsToCurrentThread());
outstanding_decode_requests_--;
if (status == media::DecodeStatus::DECODE_ERROR) {
DVLOG(2) << "Entering permanent error state";
UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
media::VideoDecodeAccelerator::PLATFORM_FAILURE,
media::VideoDecodeAccelerator::ERROR_MAX + 1);
base::AutoLock auto_lock(lock_);
has_error_ = true;
pending_buffers_.clear();
decode_timestamps_.clear();
return;
}
DecodeOnMediaThread();
}
void RTCVideoDecoderAdapter::OnOutput(
const scoped_refptr<media::VideoFrame>& frame) {
DVLOG(3) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
webrtc::VideoFrame rtc_frame(
new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame),
frame->timestamp().InMicroseconds(), 0, webrtc::kVideoRotation_0);
base::AutoLock auto_lock(lock_);
if (std::find(decode_timestamps_.begin(), decode_timestamps_.end(),
frame->timestamp()) == decode_timestamps_.end()) {
DVLOG(2) << "Discarding frame with timestamp " << frame->timestamp();
return;
}
// Assumes that Decoded() can be safely called with the lock held, which
// apparently it can be because RTCVideoDecoder does the same.
DCHECK(decode_complete_callback_);
decode_complete_callback_->Decoded(rtc_frame);
consecutive_error_count_ = 0;
}
} // namespace content
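InitializeSync() above bridges a synchronous caller (the WebRTC worker thread) and the asynchronous media::VideoDecoder::Initialize() call on the media thread by handing the decoder an InitCB that writes the result and signals a base::WaitableEvent. The same pattern, reduced to standard-library primitives so it can be read in isolation (an illustration only, not Chromium code):

#include <future>
#include <thread>

// The lambda stands in for InitializeOnMediaThread() running on the media task
// runner and the decoder eventually invoking its InitCB with a success flag.
bool InitializeSyncSketch() {
  std::promise<bool> init_done;
  std::future<bool> result = init_done.get_future();
  std::thread media_thread([&init_done] {
    bool initialized_ok = true;  // Whatever the decoder reports via InitCB.
    init_done.set_value(initialized_ok);
  });
  bool ok = result.get();  // Blocks, like waiter.Wait() in InitializeSync().
  media_thread.join();
  return ok;
}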
content/renderer/media/webrtc/rtc_video_decoder_adapter.h (new file)

// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_RENDERER_MEDIA_WEBRTC_RTC_VIDEO_DECODER_ADAPTER_H_
#define CONTENT_RENDERER_MEDIA_WEBRTC_RTC_VIDEO_DECODER_ADAPTER_H_
#include <memory>
#include "base/callback_forward.h"
#include "base/containers/circular_deque.h"
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "content/common/content_export.h"
#include "media/base/decode_status.h"
#include "media/base/video_codecs.h"
#include "media/base/video_decoder.h"
#include "third_party/webrtc/modules/video_coding/include/video_codec_interface.h"
#include "ui/gfx/geometry/size.h"
namespace base {
class SingleThreadTaskRunner;
} // namespace base
namespace media {
class DecoderBuffer;
class GpuVideoAcceleratorFactories;
class MediaLog;
class VideoFrame;
} // namespace media
namespace content {
// This class decodes video for WebRTC using a media::VideoDecoder. In
// particular, either GpuVideoDecoder or MojoVideoDecoder is used to provide
// access to hardware decoding in the GPU process.
//
// Lifecycle methods are called on the WebRTC worker thread. Decoding happens on
// a WebRTC DecodingThread, which is an rtc::PlatformThread owned by WebRTC; it
// does not have a TaskRunner.
//
// To stop decoding, WebRTC stops the DecodingThread and then calls Release() on
// the worker. Calling the DecodedImageCallback after the DecodingThread is
// stopped is illegal but, because we decode on the media thread, there is no
// way to synchronize this correctly.
class CONTENT_EXPORT RTCVideoDecoderAdapter : public webrtc::VideoDecoder {
public:
using CreateVideoDecoderCB =
base::RepeatingCallback<std::unique_ptr<media::VideoDecoder>(
media::MediaLog*)>;
// Creates and initializes an RTCVideoDecoderAdapter. Returns nullptr if
// |video_codec_type| cannot be supported.
// Called on the worker thread.
static std::unique_ptr<RTCVideoDecoderAdapter> Create(
webrtc::VideoCodecType video_codec_type,
media::GpuVideoAcceleratorFactories* gpu_factories,
CreateVideoDecoderCB create_video_decoder_cb);
// Called on the worker thread.
static void DeleteSoonOnMediaThread(
std::unique_ptr<webrtc::VideoDecoder> rtc_video_decoder_adapter,
media::GpuVideoAcceleratorFactories* gpu_factories);
// Called on |media_task_runner_|.
~RTCVideoDecoderAdapter() override;
// webrtc::VideoDecoder implementation.
// Called on the DecodingThread.
int32_t InitDecode(const webrtc::VideoCodec* codec_settings,
int32_t number_of_cores) override;
// Called on the DecodingThread.
int32_t RegisterDecodeCompleteCallback(
webrtc::DecodedImageCallback* callback) override;
// Called on the DecodingThread.
int32_t Decode(const webrtc::EncodedImage& input_image,
bool missing_frames,
const webrtc::CodecSpecificInfo* codec_specific_info,
int64_t render_time_ms) override;
// Called on the worker thread.
int32_t Release() override;
// Called on the worker thread and on the DecodingThread.
const char* ImplementationName() const override;
private:
// |create_video_decoder_cb| will always be called on |media_task_runner|.
// Called on the worker thread.
RTCVideoDecoderAdapter(
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
CreateVideoDecoderCB create_video_decoder_cb,
webrtc::VideoCodecType video_codec_type);
bool InitializeSync();
void InitializeOnMediaThread(media::VideoDecoder::InitCB init_cb);
void DecodeOnMediaThread();
void OnDecodeDone(media::DecodeStatus status);
void OnOutput(const scoped_refptr<media::VideoFrame>& frame);
// Construction parameters.
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
CreateVideoDecoderCB create_video_decoder_cb_;
webrtc::VideoCodecType video_codec_type_;
// Media thread members.
// |media_log_| must outlive |video_decoder_| because it is passed as a raw
// pointer.
std::unique_ptr<media::MediaLog> media_log_;
std::unique_ptr<media::VideoDecoder> video_decoder_;
int32_t outstanding_decode_requests_ = 0;
// Shared members.
base::Lock lock_;
int32_t consecutive_error_count_ = 0;
bool has_error_ = false;
webrtc::DecodedImageCallback* decode_complete_callback_ = nullptr;
// Requests that have not been submitted to the decoder yet.
base::circular_deque<scoped_refptr<media::DecoderBuffer>> pending_buffers_;
// Record of timestamps that have been sent to be decoded. Removing a
// timestamp will cause the frame to be dropped when it is output.
base::circular_deque<base::TimeDelta> decode_timestamps_;
// Thread management.
THREAD_CHECKER(worker_thread_checker_);
THREAD_CHECKER(decoding_thread_checker_);
base::WeakPtr<RTCVideoDecoderAdapter> weak_this_;
base::WeakPtrFactory<RTCVideoDecoderAdapter> weak_this_factory_;
DISALLOW_COPY_AND_ASSIGN(RTCVideoDecoderAdapter);
};
} // namespace content
#endif // CONTENT_RENDERER_MEDIA_WEBRTC_RTC_VIDEO_DECODER_ADAPTER_H_
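Because ~RTCVideoDecoderAdapter() must run on |media_task_runner_| (per the header above), an owner cannot simply reset its unique_ptr on the worker thread. A hedged sketch of the intended teardown, mirroring what the unit test below does in its destructor; the owning function is hypothetical:

// Illustration only; the owning factory code is not part of this change.
void DestroyAdapterOnMediaThread(
    std::unique_ptr<content::RTCVideoDecoderAdapter> adapter,
    media::GpuVideoAcceleratorFactories* gpu_factories) {
  // The adapter is handed off as a webrtc::VideoDecoder and its destruction is
  // posted to the media task runner.
  content::RTCVideoDecoderAdapter::DeleteSoonOnMediaThread(std::move(adapter),
                                                           gpu_factories);
}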
content/renderer/media/webrtc/rtc_video_decoder_adapter_unittest.cc (new file)

// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include <vector>
#include <stdint.h>
#include "base/callback_forward.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/memory/scoped_refptr.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_task_environment.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "content/renderer/media/webrtc/rtc_video_decoder_adapter.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "media/base/decode_status.h"
#include "media/base/gmock_callback_support.h"
#include "media/base/media_util.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "media/video/mock_gpu_video_accelerator_factories.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
using ::testing::_;
using ::testing::AtLeast;
using ::testing::DoAll;
using ::testing::Mock;
using ::testing::Return;
using ::testing::SaveArg;
using ::testing::StrictMock;
namespace content {
namespace {
class MockVideoDecoder : public media::VideoDecoder {
public:
std::string GetDisplayName() const override { return "MockVideoDecoder"; }
MOCK_METHOD6(
Initialize,
void(const media::VideoDecoderConfig& config,
bool low_delay,
media::CdmContext* cdm_context,
const InitCB& init_cb,
const OutputCB& output_cb,
const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb));
MOCK_METHOD2(Decode,
void(scoped_refptr<media::DecoderBuffer> buffer,
const DecodeCB&));
MOCK_METHOD1(Reset, void(const base::RepeatingClosure&));
bool NeedsBitstreamConversion() const override { return false; }
bool CanReadWithoutStalling() const override { return true; }
int GetMaxDecodeRequests() const override { return 1; }
};
// Wraps a callback as a webrtc::DecodedImageCallback.
class DecodedImageCallback : public webrtc::DecodedImageCallback {
public:
DecodedImageCallback(
base::RepeatingCallback<void(const webrtc::VideoFrame&)> callback)
: callback_(callback) {}
int32_t Decoded(webrtc::VideoFrame& decodedImage) override {
callback_.Run(decodedImage);
// TODO(sandersd): Does the return value matter? RTCVideoDecoder
// ignores it.
return 0;
}
private:
base::RepeatingCallback<void(const webrtc::VideoFrame&)> callback_;
DISALLOW_COPY_AND_ASSIGN(DecodedImageCallback);
};
} // namespace
class RTCVideoDecoderAdapterTest : public ::testing::Test {
public:
RTCVideoDecoderAdapterTest()
: media_thread_("Media Thread"),
gpu_factories_(nullptr),
decoded_image_callback_(decoded_cb_.Get()) {
media_thread_.Start();
ON_CALL(gpu_factories_, GetTaskRunner())
.WillByDefault(Return(media_thread_.task_runner()));
EXPECT_CALL(gpu_factories_, GetTaskRunner()).Times(AtLeast(0));
owned_video_decoder_ = std::make_unique<StrictMock<MockVideoDecoder>>();
video_decoder_ = owned_video_decoder_.get();
}
~RTCVideoDecoderAdapterTest() {
if (!rtc_video_decoder_adapter_)
return;
RTCVideoDecoderAdapter::DeleteSoonOnMediaThread(
std::move(rtc_video_decoder_adapter_), &gpu_factories_);
media_thread_.FlushForTesting();
}
protected:
std::unique_ptr<media::VideoDecoder> CreateVideoDecoder(
media::MediaLog* media_log) {
DCHECK(owned_video_decoder_);
return std::move(owned_video_decoder_);
}
bool BasicSetup() {
if (!CreateAndInitialize())
return false;
if (InitDecode() != WEBRTC_VIDEO_CODEC_OK)
return false;
if (RegisterDecodeCompleteCallback() != WEBRTC_VIDEO_CODEC_OK)
return false;
return true;
}
bool BasicTeardown() {
if (Release() != WEBRTC_VIDEO_CODEC_OK)
return false;
return true;
}
bool CreateAndInitialize(bool init_cb_result = true) {
EXPECT_CALL(*video_decoder_, Initialize(_, _, _, _, _, _))
.WillOnce(DoAll(SaveArg<4>(&output_cb_),
media::RunCallback<3>(init_cb_result)));
rtc_video_decoder_adapter_ = RTCVideoDecoderAdapter::Create(
webrtc::kVideoCodecVP9, &gpu_factories_,
base::BindRepeating(&RTCVideoDecoderAdapterTest::CreateVideoDecoder,
base::Unretained(this)));
return !!rtc_video_decoder_adapter_;
}
int32_t InitDecode() {
webrtc::VideoCodec codec_settings;
codec_settings.codecType = webrtc::kVideoCodecVP9;
return rtc_video_decoder_adapter_->InitDecode(&codec_settings, 1);
}
int32_t RegisterDecodeCompleteCallback() {
return rtc_video_decoder_adapter_->RegisterDecodeCompleteCallback(
&decoded_image_callback_);
}
int32_t Decode(uint32_t timestamp) {
uint8_t buf[] = {0};
webrtc::EncodedImage input_image(&buf[0], 1, 1);
input_image._completeFrame = true;
input_image._timeStamp = timestamp;
return rtc_video_decoder_adapter_->Decode(input_image, false, nullptr, 0);
}
void FinishDecode(uint32_t timestamp) {
media_thread_.task_runner()->PostTask(
FROM_HERE,
base::BindOnce(&RTCVideoDecoderAdapterTest::FinishDecodeOnMediaThread,
base::Unretained(this), timestamp));
}
void FinishDecodeOnMediaThread(uint32_t timestamp) {
DCHECK(media_thread_.task_runner()->BelongsToCurrentThread());
gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes];
mailbox_holders[0].mailbox = gpu::Mailbox::Generate();
scoped_refptr<media::VideoFrame> frame =
media::VideoFrame::WrapNativeTextures(
media::PIXEL_FORMAT_ARGB, mailbox_holders,
media::VideoFrame::ReleaseMailboxCB(), gfx::Size(640, 360),
gfx::Rect(640, 360), gfx::Size(640, 360),
base::TimeDelta::FromMicroseconds(timestamp));
output_cb_.Run(std::move(frame));
}
int32_t Release() { return rtc_video_decoder_adapter_->Release(); }
base::test::ScopedTaskEnvironment scoped_task_environment_;
base::Thread media_thread_;
// Owned by |rtc_video_decoder_adapter_|.
StrictMock<MockVideoDecoder>* video_decoder_ = nullptr;
StrictMock<base::MockCallback<
base::RepeatingCallback<void(const webrtc::VideoFrame&)>>>
decoded_cb_;
private:
StrictMock<media::MockGpuVideoAcceleratorFactories> gpu_factories_;
std::unique_ptr<RTCVideoDecoderAdapter> rtc_video_decoder_adapter_;
std::unique_ptr<StrictMock<MockVideoDecoder>> owned_video_decoder_;
DecodedImageCallback decoded_image_callback_;
media::VideoDecoder::OutputCB output_cb_;
DISALLOW_COPY_AND_ASSIGN(RTCVideoDecoderAdapterTest);
};
TEST_F(RTCVideoDecoderAdapterTest, Lifecycle) {
ASSERT_TRUE(BasicSetup());
ASSERT_TRUE(BasicTeardown());
}
TEST_F(RTCVideoDecoderAdapterTest, InitializationFailure) {
ASSERT_FALSE(CreateAndInitialize(false));
}
TEST_F(RTCVideoDecoderAdapterTest, Decode) {
ASSERT_TRUE(BasicSetup());
EXPECT_CALL(*video_decoder_, Decode(_, _))
.WillOnce(media::RunCallback<1>(media::DecodeStatus::OK));
ASSERT_EQ(Decode(0), WEBRTC_VIDEO_CODEC_OK);
EXPECT_CALL(decoded_cb_, Run(_));
FinishDecode(0);
media_thread_.FlushForTesting();
}
TEST_F(RTCVideoDecoderAdapterTest, Decode_Error) {
ASSERT_TRUE(BasicSetup());
EXPECT_CALL(*video_decoder_, Decode(_, _))
.WillOnce(media::RunCallback<1>(media::DecodeStatus::DECODE_ERROR));
ASSERT_EQ(Decode(0), WEBRTC_VIDEO_CODEC_OK);
media_thread_.FlushForTesting();
ASSERT_EQ(Decode(1), WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
}
TEST_F(RTCVideoDecoderAdapterTest, Decode_Hang_Short) {
ASSERT_TRUE(BasicSetup());
// Ignore Decode() calls.
EXPECT_CALL(*video_decoder_, Decode(_, _)).Times(AtLeast(1));
for (int counter = 0; counter < 10; counter++) {
int32_t result = Decode(counter);
if (result == WEBRTC_VIDEO_CODEC_ERROR) {
ASSERT_GT(counter, 2);
return;
}
media_thread_.FlushForTesting();
}
FAIL();
}
TEST_F(RTCVideoDecoderAdapterTest, Decode_Hang_Long) {
ASSERT_TRUE(BasicSetup());
// Ignore Decode() calls.
EXPECT_CALL(*video_decoder_, Decode(_, _)).Times(AtLeast(1));
for (int counter = 0; counter < 100; counter++) {
int32_t result = Decode(counter);
if (result == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE) {
ASSERT_GT(counter, 10);
return;
}
media_thread_.FlushForTesting();
}
FAIL();
}
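// Hypothetical additional coverage, not part of the original change: OnOutput()
// drops frames whose timestamp was never recorded in |decode_timestamps_|, so
// the StrictMock |decoded_cb_| must not fire for an unknown timestamp.
TEST_F(RTCVideoDecoderAdapterTest, Decode_DropsUnknownTimestamp) {
  ASSERT_TRUE(BasicSetup());
  EXPECT_CALL(*video_decoder_, Decode(_, _))
      .WillOnce(media::RunCallback<1>(media::DecodeStatus::OK));
  ASSERT_EQ(Decode(0), WEBRTC_VIDEO_CODEC_OK);
  media_thread_.FlushForTesting();
  // Output a frame whose timestamp (1) was never submitted for decoding.
  FinishDecode(1);
  media_thread_.FlushForTesting();
}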
} // namespace content
content/test/BUILD.gn

@@ -1724,6 +1724,7 @@ test("content_unittests") {
     "../renderer/media/webrtc/rtc_rtp_sender_unittest.cc",
     "../renderer/media/webrtc/rtc_rtp_transceiver_unittest.cc",
     "../renderer/media/webrtc/rtc_stats_unittest.cc",
+    "../renderer/media/webrtc/rtc_video_decoder_adapter_unittest.cc",
     "../renderer/media/webrtc/rtc_video_decoder_unittest.cc",
     "../renderer/media/webrtc/rtc_video_encoder_unittest.cc",
     "../renderer/media/webrtc/stun_field_trial_unittest.cc",