Commit 09c211ae authored by Emircan Uysaler's avatar Emircan Uysaler Committed by Commit Bot

Reset VDA for HDR Color space changes

VDA implementations require config.container_color_space and
config.target_color_space information to be set to provide correct output. For
WebRTC frames that carry this information for VP9.2, we can reinitialize VDA
with this info for the correct colors.

Bug: 939387
Change-Id: Ib2ef4c7fdfbeef5e6f25a887e32a5c9c4c2e170e
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1476065
Commit-Queue: Emircan Uysaler <emircan@chromium.org>
Reviewed-by: default avatarDan Sanders <sandersd@chromium.org>
Cr-Commit-Position: refs/heads/master@{#638644}
parent df8868a0
......@@ -106,8 +106,7 @@ bool BrowserGpuVideoAcceleratorFactories::IsDecoderConfigSupported(
std::unique_ptr<media::VideoDecoder>
BrowserGpuVideoAcceleratorFactories::CreateVideoDecoder(
media::MediaLog* media_log,
const media::RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) {
const media::RequestOverlayInfoCB& request_overlay_info_cb) {
return nullptr;
}
......
......@@ -28,8 +28,7 @@ class BrowserGpuVideoAcceleratorFactories
const media::VideoDecoderConfig& config) override;
std::unique_ptr<media::VideoDecoder> CreateVideoDecoder(
media::MediaLog* media_log,
const media::RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) override;
const media::RequestOverlayInfoCB& request_overlay_info_cb) override;
std::unique_ptr<media::VideoDecodeAccelerator> CreateVideoDecodeAccelerator()
override;
std::unique_ptr<media::VideoEncodeAccelerator> CreateVideoEncodeAccelerator()
......
......@@ -209,8 +209,7 @@ bool GpuVideoAcceleratorFactoriesImpl::IsDecoderConfigSupported(
std::unique_ptr<media::VideoDecoder>
GpuVideoAcceleratorFactoriesImpl::CreateVideoDecoder(
media::MediaLog* media_log,
const media::RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) {
const media::RequestOverlayInfoCB& request_overlay_info_cb) {
DCHECK(video_accelerator_enabled_);
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(interface_factory_.is_bound());
......@@ -223,12 +222,12 @@ GpuVideoAcceleratorFactoriesImpl::CreateVideoDecoder(
interface_factory_->CreateVideoDecoder(mojo::MakeRequest(&video_decoder));
return std::make_unique<media::MojoVideoDecoder>(
task_runner_, this, media_log, std::move(video_decoder),
request_overlay_info_cb, target_color_space);
request_overlay_info_cb, rendering_color_space_);
}
#endif // BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
return std::make_unique<media::GpuVideoDecoder>(
this, request_overlay_info_cb, target_color_space, media_log);
this, request_overlay_info_cb, rendering_color_space_, media_log);
}
std::unique_ptr<media::VideoDecodeAccelerator>
......
......@@ -71,8 +71,7 @@ class CONTENT_EXPORT GpuVideoAcceleratorFactoriesImpl
int32_t GetCommandBufferRouteId() override;
std::unique_ptr<media::VideoDecoder> CreateVideoDecoder(
media::MediaLog* media_log,
const media::RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) override;
const media::RequestOverlayInfoCB& request_overlay_info_cb) override;
bool IsDecoderConfigSupported(
const media::VideoDecoderConfig& config) override;
std::unique_ptr<media::VideoDecodeAccelerator> CreateVideoDecodeAccelerator()
......
......@@ -6,6 +6,7 @@
#include <algorithm>
#include <functional>
#include <utility>
#include "base/bind.h"
#include "base/bind_helpers.h"
......@@ -22,10 +23,10 @@
#include "build/build_config.h"
#include "content/renderer/media/render_media_log.h"
#include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_utils.h"
#include "media/base/media_log.h"
#include "media/base/media_util.h"
#include "media/base/overlay_info.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_types.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "third_party/webrtc/api/video/video_frame.h"
......@@ -159,7 +160,8 @@ std::unique_ptr<RTCVideoDecoderAdapter> RTCVideoDecoderAdapter::Create(
// Synchronously verify that the decoder can be initialized.
std::unique_ptr<RTCVideoDecoderAdapter> rtc_video_decoder_adapter =
base::WrapUnique(new RTCVideoDecoderAdapter(gpu_factories, format));
base::WrapUnique(
new RTCVideoDecoderAdapter(gpu_factories, config, format));
if (!rtc_video_decoder_adapter->InitializeSync(config)) {
gpu_factories->GetTaskRunner()->DeleteSoon(
FROM_HERE, std::move(rtc_video_decoder_adapter));
......@@ -171,10 +173,12 @@ std::unique_ptr<RTCVideoDecoderAdapter> RTCVideoDecoderAdapter::Create(
RTCVideoDecoderAdapter::RTCVideoDecoderAdapter(
media::GpuVideoAcceleratorFactories* gpu_factories,
const media::VideoDecoderConfig& config,
const webrtc::SdpVideoFormat& format)
: media_task_runner_(gpu_factories->GetTaskRunner()),
gpu_factories_(gpu_factories),
format_(format),
config_(config),
weak_this_factory_(this) {
DVLOG(1) << __func__;
DETACH_FROM_THREAD(decoding_thread_checker_);
......@@ -189,7 +193,8 @@ RTCVideoDecoderAdapter::~RTCVideoDecoderAdapter() {
bool RTCVideoDecoderAdapter::InitializeSync(
const media::VideoDecoderConfig& config) {
DVLOG(3) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(worker_thread_checker_);
// Can be called on |worker_thread_| or |decoding_thread_|.
DCHECK(!media_task_runner_->BelongsToCurrentThread());
bool result = false;
base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
......@@ -249,6 +254,15 @@ int32_t RTCVideoDecoderAdapter::Decode(
buffer->set_timestamp(
base::TimeDelta::FromMicroseconds(input_image.Timestamp()));
if (ShouldReinitializeForSettingHDRColorSpace(input_image)) {
config_.set_color_space_info(
WebRtcToMediaVideoColorSpace(*input_image.ColorSpace()));
if (!ReinitializeSync(config_))
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
if (input_image._frameType != webrtc::kVideoFrameKey)
return WEBRTC_VIDEO_CODEC_ERROR;
}
// Queue for decoding.
{
base::AutoLock auto_lock(lock_);
......@@ -306,18 +320,21 @@ void RTCVideoDecoderAdapter::InitializeOnMediaThread(
DVLOG(3) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
// TODO(sandersd): Plumb a real log sink here so that we can contribute to the
// media-internals UI. The current log just discards all messages.
// On ReinitializeSync() calls, |video_decoder_| may already be set.
if (!video_decoder_) {
// TODO(sandersd): Plumb a real log sink here so that we can contribute to
// the media-internals UI. The current log just discards all messages.
media_log_ = std::make_unique<media::NullMediaLog>();
video_decoder_ = gpu_factories_->CreateVideoDecoder(
media_log_.get(), base::BindRepeating(&OnRequestOverlayInfo),
gfx::ColorSpace());
media_log_.get(), base::BindRepeating(&OnRequestOverlayInfo));
if (!video_decoder_) {
media_task_runner_->PostTask(FROM_HERE,
base::BindRepeating(init_cb, false));
return;
}
}
// In practice this is ignored by hardware decoders.
bool low_delay = true;
......@@ -407,4 +424,67 @@ void RTCVideoDecoderAdapter::OnOutput(
consecutive_error_count_ = 0;
}
// Decides whether the incoming encoded frame carries HDR color space
// metadata that the current decoder config does not yet reflect, in which
// case the decoder must be reinitialized (VDA implementations need the
// color space at initialization time to produce correct output).
bool RTCVideoDecoderAdapter::ShouldReinitializeForSettingHDRColorSpace(
    const webrtc::EncodedImage& input_image) const {
  DCHECK_CALLED_ON_VALID_THREAD(decoding_thread_checker_);

  // Only VP9 profile 2 content is handled here.
  if (config_.profile() != media::VP9PROFILE_PROFILE2)
    return false;
  // Frames without color space metadata cannot trigger a reinitialize.
  if (!input_image.ColorSpace())
    return false;

  const media::VideoColorSpace& frame_color_space =
      WebRtcToMediaVideoColorSpace(*input_image.ColorSpace());
  // Reinitialize when the config has no color space yet, or when the
  // frame's color space differs from the one currently configured.
  return !config_.color_space_info().IsSpecified() ||
         frame_color_space != config_.color_space_info();
}
// Flushes the decoder and re-runs initialization with |config| on the media
// thread, blocking the calling thread until a result is available. Returns
// false if either the flush or the re-initialization fails (or if the task
// could not be posted).
bool RTCVideoDecoderAdapter::ReinitializeSync(
    const media::VideoDecoderConfig& config) {
  DCHECK_CALLED_ON_VALID_THREAD(decoding_thread_checker_);

  bool result = false;
  // Manual-reset event signaled by FinishWait after |result| is written on
  // the media thread.
  base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
                             base::WaitableEvent::InitialState::NOT_SIGNALED);
  media::VideoDecoder::InitCB init_cb =
      base::BindRepeating(&FinishWait, &waiter, &result);
  // On a successful flush, re-initialize with the new config. The cref'd
  // locals remain valid because this thread blocks in waiter.Wait() below
  // until the callbacks have run.
  FlushDoneCB flush_success_cb =
      base::BindOnce(&RTCVideoDecoderAdapter::InitializeOnMediaThread,
                     weak_this_, std::cref(config), std::cref(init_cb));
  // A failed flush resolves the wait immediately with failure.
  FlushDoneCB flush_fail_cb =
      base::BindOnce(&FinishWait, &waiter, &result, false);
  if (media_task_runner_->PostTask(
          FROM_HERE, base::BindOnce(&RTCVideoDecoderAdapter::FlushOnMediaThread,
                                    weak_this_, std::move(flush_success_cb),
                                    std::move(flush_fail_cb)))) {
    waiter.Wait();
  }
  return result;
}
// Flushes |video_decoder_| on the media thread: drops buffers that have not
// yet been submitted for decode, then sends an end-of-stream buffer so the
// decoder drains. Exactly one of the two callbacks runs, depending on the
// EOS decode status.
void RTCVideoDecoderAdapter::FlushOnMediaThread(FlushDoneCB flush_success_cb,
                                                FlushDoneCB flush_fail_cb) {
  DCHECK(media_task_runner_->BelongsToCurrentThread());

  // Remove any pending tasks.
  {
    base::AutoLock auto_lock(lock_);
    pending_buffers_.clear();
  }

  // Send EOS frame for flush.
  video_decoder_->Decode(
      media::DecoderBuffer::CreateEOSBuffer(),
      base::BindRepeating(
          [](FlushDoneCB flush_success, FlushDoneCB flush_fail,
             media::DecodeStatus status) {
            // Any non-OK status (e.g. ABORTED) counts as a flush failure.
            if (status == media::DecodeStatus::OK)
              std::move(flush_success).Run();
            else
              std::move(flush_fail).Run();
          },
          base::Passed(&flush_success_cb), base::Passed(&flush_fail_cb)));
}
} // namespace content
......@@ -18,6 +18,7 @@
#include "media/base/decode_status.h"
#include "media/base/video_codecs.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/modules/video_coding/include/video_codec_interface.h"
#include "ui/gfx/geometry/size.h"
......@@ -30,7 +31,6 @@ namespace media {
class DecoderBuffer;
class GpuVideoAcceleratorFactories;
class MediaLog;
class VideoDecoderConfig;
class VideoFrame;
} // namespace media
......@@ -81,9 +81,11 @@ class CONTENT_EXPORT RTCVideoDecoderAdapter : public webrtc::VideoDecoder {
using CreateVideoDecoderCB =
base::RepeatingCallback<std::unique_ptr<media::VideoDecoder>(
media::MediaLog*)>;
using FlushDoneCB = base::OnceCallback<void()>;
// Called on the worker thread.
RTCVideoDecoderAdapter(media::GpuVideoAcceleratorFactories* gpu_factories,
const media::VideoDecoderConfig& config,
const webrtc::SdpVideoFormat& format);
bool InitializeSync(const media::VideoDecoderConfig& config);
......@@ -93,10 +95,17 @@ class CONTENT_EXPORT RTCVideoDecoderAdapter : public webrtc::VideoDecoder {
void OnDecodeDone(media::DecodeStatus status);
void OnOutput(const scoped_refptr<media::VideoFrame>& frame);
bool ShouldReinitializeForSettingHDRColorSpace(
const webrtc::EncodedImage& input_image) const;
bool ReinitializeSync(const media::VideoDecoderConfig& config);
void FlushOnMediaThread(FlushDoneCB flush_success_cb,
FlushDoneCB flush_fail_cb);
// Construction parameters.
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
media::GpuVideoAcceleratorFactories* gpu_factories_;
webrtc::SdpVideoFormat format_;
media::VideoDecoderConfig config_;
// Media thread members.
// |media_log_| must outlive |video_decoder_| because it is passed as a raw
......
......@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <memory>
#include <utility>
#include <vector>
#include <stdint.h>
......@@ -30,6 +31,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/webrtc/api/video_codecs/video_codec.h"
#include "third_party/webrtc/media/base/vp9_profile.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
......@@ -91,6 +93,8 @@ class RTCVideoDecoderAdapterTest : public ::testing::Test {
RTCVideoDecoderAdapterTest()
: media_thread_("Media Thread"),
gpu_factories_(nullptr),
sdp_format_(webrtc::SdpVideoFormat(
webrtc::CodecTypeToPayloadString(webrtc::kVideoCodecVP9))),
decoded_image_callback_(decoded_cb_.Get()) {
media_thread_.Start();
......@@ -105,15 +109,14 @@ class RTCVideoDecoderAdapterTest : public ::testing::Test {
.WillByDefault(Return(true));
EXPECT_CALL(gpu_factories_, IsDecoderConfigSupported(_)).Times(AtLeast(0));
ON_CALL(gpu_factories_, CreateVideoDecoder(_, _, _))
ON_CALL(gpu_factories_, CreateVideoDecoder(_, _))
.WillByDefault(
[this](media::MediaLog* media_log,
const media::RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) {
const media::RequestOverlayInfoCB& request_overlay_info_cb) {
DCHECK(this->owned_video_decoder_);
return std::move(this->owned_video_decoder_);
});
EXPECT_CALL(gpu_factories_, CreateVideoDecoder(_, _, _)).Times(AtLeast(0));
EXPECT_CALL(gpu_factories_, CreateVideoDecoder(_, _)).Times(AtLeast(0));
}
~RTCVideoDecoderAdapterTest() {
......@@ -144,12 +147,10 @@ class RTCVideoDecoderAdapterTest : public ::testing::Test {
bool CreateAndInitialize(bool init_cb_result = true) {
EXPECT_CALL(*video_decoder_, Initialize(_, _, _, _, _, _))
.WillOnce(DoAll(SaveArg<4>(&output_cb_),
.WillOnce(DoAll(SaveArg<0>(&vda_config_), SaveArg<4>(&output_cb_),
media::RunCallback<3>(init_cb_result)));
rtc_video_decoder_adapter_ = RTCVideoDecoderAdapter::Create(
&gpu_factories_,
webrtc::SdpVideoFormat(
webrtc::CodecTypeToPayloadString(webrtc::kVideoCodecVP9)));
rtc_video_decoder_adapter_ =
RTCVideoDecoderAdapter::Create(&gpu_factories_, sdp_format_);
return !!rtc_video_decoder_adapter_;
}
......@@ -194,6 +195,25 @@ class RTCVideoDecoderAdapterTest : public ::testing::Test {
int32_t Release() { return rtc_video_decoder_adapter_->Release(); }
// Builds a one-byte key frame whose color space metadata is fully
// populated, so that Decode() sees a frame capable of triggering the HDR
// reinitialization path.
webrtc::EncodedImage GetEncodedImageWithColorSpace(uint8_t* buf,
                                                   uint32_t timestamp) {
  webrtc::EncodedImage image(buf, 1, 1);
  image._completeFrame = true;
  image._frameType = webrtc::kVideoFrameKey;
  image.SetTimestamp(timestamp);

  webrtc::ColorSpace color_space;
  color_space.set_primaries_from_uint8(1);
  color_space.set_transfer_from_uint8(1);
  color_space.set_matrix_from_uint8(1);
  color_space.set_range_from_uint8(1);
  image.SetColorSpace(color_space);
  return image;
}
// Overrides the SDP format used when the adapter is created (e.g. to select
// VP9 profile 2); must be called before CreateAndInitialize().
void SetSdpFormat(const webrtc::SdpVideoFormat& sdp_format) {
  sdp_format_ = sdp_format;
}
base::test::ScopedTaskEnvironment scoped_task_environment_;
base::Thread media_thread_;
......@@ -205,9 +225,11 @@ class RTCVideoDecoderAdapterTest : public ::testing::Test {
decoded_cb_;
StrictMock<media::MockGpuVideoAcceleratorFactories> gpu_factories_;
media::VideoDecoderConfig vda_config_;
std::unique_ptr<RTCVideoDecoderAdapter> rtc_video_decoder_adapter_;
private:
webrtc::SdpVideoFormat sdp_format_;
std::unique_ptr<StrictMock<MockVideoDecoder>> owned_video_decoder_;
DecodedImageCallback decoded_image_callback_;
media::VideoDecoder::OutputCB output_cb_;
......@@ -301,4 +323,80 @@ TEST_F(RTCVideoDecoderAdapterTest, Decode_Hang_Long) {
FAIL();
}
// Verifies that the first VP9.2 frame carrying a color space triggers a
// flush plus a second Initialize() of the underlying decoder with the color
// space applied, and that a subsequent frame with the same color space
// decodes without another reinitialize.
TEST_F(RTCVideoDecoderAdapterTest, ReinitializesForHDRColorSpaceInitially) {
  SetSdpFormat(webrtc::SdpVideoFormat(
      "VP9", {{webrtc::kVP9FmtpProfileId,
               webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}}));
  ASSERT_TRUE(BasicSetup());
  EXPECT_EQ(media::VP9PROFILE_PROFILE2, vda_config_.profile());
  // The initial config carries no color space yet.
  EXPECT_FALSE(vda_config_.color_space_info().IsSpecified());

  uint8_t buf[] = {0};
  // Decode() is expected to be called for EOS flush as well.
  EXPECT_CALL(*video_decoder_, Decode(_, _))
      .Times(3)
      .WillRepeatedly(media::RunCallback<1>(media::DecodeStatus::OK));
  EXPECT_CALL(decoded_cb_, Run(_)).Times(2);

  // First Decode() should cause a reinitialize as new color space is given.
  EXPECT_CALL(*video_decoder_, Initialize(_, _, _, _, _, _))
      .WillOnce(DoAll(SaveArg<0>(&vda_config_), media::RunCallback<3>(true)));
  webrtc::EncodedImage first_input_image =
      GetEncodedImageWithColorSpace(&buf[0], 0);
  ASSERT_EQ(
      rtc_video_decoder_adapter_->Decode(first_input_image, false, nullptr, 0),
      WEBRTC_VIDEO_CODEC_OK);
  media_thread_.FlushForTesting();
  // The config captured from the reinitialize now carries a color space.
  EXPECT_TRUE(vda_config_.color_space_info().IsSpecified());
  FinishDecode(0);
  media_thread_.FlushForTesting();

  // Second Decode() with same params should happen normally.
  webrtc::EncodedImage second_input_image =
      GetEncodedImageWithColorSpace(&buf[0], 1);
  ASSERT_EQ(
      rtc_video_decoder_adapter_->Decode(second_input_image, false, nullptr, 0),
      WEBRTC_VIDEO_CODEC_OK);
  FinishDecode(1);
  media_thread_.FlushForTesting();
}
// Verifies that a failing re-Initialize() during the HDR color space change
// makes Decode() request software fallback rather than reporting success.
TEST_F(RTCVideoDecoderAdapterTest, HandlesReinitializeFailure) {
  SetSdpFormat(webrtc::SdpVideoFormat(
      "VP9", {{webrtc::kVP9FmtpProfileId,
               webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}}));
  ASSERT_TRUE(BasicSetup());
  EXPECT_EQ(media::VP9PROFILE_PROFILE2, vda_config_.profile());
  EXPECT_FALSE(vda_config_.color_space_info().IsSpecified());
  uint8_t buf[] = {0};
  webrtc::EncodedImage input_image = GetEncodedImageWithColorSpace(&buf[0], 0);

  // Decode() is expected to be called for EOS flush as well.
  EXPECT_CALL(*video_decoder_, Decode(_, _))
      .WillOnce(media::RunCallback<1>(media::DecodeStatus::OK));
  // Set Initialize() to fail.
  EXPECT_CALL(*video_decoder_, Initialize(_, _, _, _, _, _))
      .WillOnce(media::RunCallback<3>(false));
  ASSERT_EQ(rtc_video_decoder_adapter_->Decode(input_image, false, nullptr, 0),
            WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
}
// Verifies that a failed EOS flush (e.g. an aborted decode) during the HDR
// color space change also results in a software-fallback request.
TEST_F(RTCVideoDecoderAdapterTest, HandlesFlushFailure) {
  SetSdpFormat(webrtc::SdpVideoFormat(
      "VP9", {{webrtc::kVP9FmtpProfileId,
               webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}}));
  ASSERT_TRUE(BasicSetup());
  EXPECT_EQ(media::VP9PROFILE_PROFILE2, vda_config_.profile());
  EXPECT_FALSE(vda_config_.color_space_info().IsSpecified());
  uint8_t buf[] = {0};
  webrtc::EncodedImage input_image = GetEncodedImageWithColorSpace(&buf[0], 0);

  // Decode() is expected to be called for EOS flush, set to fail.
  EXPECT_CALL(*video_decoder_, Decode(_, _))
      .WillOnce(media::RunCallback<1>(media::DecodeStatus::ABORTED));
  ASSERT_EQ(rtc_video_decoder_adapter_->Decode(input_image, false, nullptr, 0),
            WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
}
} // namespace content
......@@ -83,6 +83,11 @@ VideoDecoderConfig::VideoDecoderConfig(const VideoDecoderConfig& other) =
VideoDecoderConfig::~VideoDecoderConfig() = default;
// Sets the color space of the image data. Per this change's intent, VDA
// implementations need this set before (re)initialization to produce
// correct HDR output.
void VideoDecoderConfig::set_color_space_info(
    const VideoColorSpace& color_space) {
  color_space_info_ = color_space;
}
// Returns the color space of the image data.
const VideoColorSpace& VideoDecoderConfig::color_space_info() const {
  return color_space_info_;
}
......
......@@ -137,6 +137,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
}
// Color space of the image data.
void set_color_space_info(const VideoColorSpace& color_space);
const VideoColorSpace& color_space_info() const;
// Dynamic range of the image data.
......
......@@ -277,11 +277,17 @@ void VdaVideoDecoder::Initialize(const VideoDecoderConfig& config,
false;
#endif
// Hardware decoders require ColorSpace to be set beforehand to provide
// correct HDR output.
const bool is_hdr_color_space_change =
config_.profile() == media::VP9PROFILE_PROFILE2 &&
config_.color_space_info() != config.color_space_info();
// The configuration is supported.
config_ = config;
if (reinitializing) {
if (is_profile_change) {
if (is_profile_change || is_hdr_color_space_change) {
MEDIA_LOG(INFO, media_log_) << "Reinitializing video decode accelerator "
<< "for profile change";
gpu_task_runner_->PostTask(
......
......@@ -89,8 +89,7 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual std::unique_ptr<media::VideoDecoder> CreateVideoDecoder(
MediaLog* media_log,
const RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) = 0;
const RequestOverlayInfoCB& request_overlay_info_cb) = 0;
// Caller owns returned pointer, but should call Destroy() on it (instead of
// directly deleting) for proper destruction, as per the
......
......@@ -37,10 +37,10 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
MOCK_METHOD0(GetCommandBufferRouteId, int32_t());
MOCK_METHOD1(IsDecoderConfigSupported, bool(const VideoDecoderConfig&));
MOCK_METHOD3(CreateVideoDecoder,
MOCK_METHOD2(
CreateVideoDecoder,
std::unique_ptr<media::VideoDecoder>(MediaLog*,
const RequestOverlayInfoCB&,
const gfx::ColorSpace&));
const RequestOverlayInfoCB&));
// CreateVideo{Decode,Encode}Accelerator returns scoped_ptr, which the mocking
// framework does not want. Trampoline them.
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment