Commit 559f9b0c authored by Dale Curtis, committed by Chromium LUCI CQ

Support non-HDR high bit depth content in D3D11VideoDecoder.

Previously we assumed all HDR content is 10-bit and all SDR content
is 8-bit; this generally works well, but now that the accelerators
have a GetBitDepth() method, we should use it.

This change uses the same mechanism we already use for size changes: we
get a kConfigChange event on the first decode that carries the real
size and bit depth information. We still assume HDR is 10-bit and
SDR is 8-bit at decoder creation, but if we're wrong we'll now fix
that choice during the first config change rather than silently
emitting P010 as NV12 /grumble.

R=liberato

Fixed: 1115846
Test: Updated unittests.
Change-Id: Ifa56c65c2fd7bb82d404f6a73a4369b347a7d1df
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2590395
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Auto-Submit: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Frank Liberato <liberato@chromium.org>
Cr-Commit-Position: refs/heads/master@{#836920}
parent 9e82e9f4
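
To summarize the mechanism before the diff: once the accelerator exists, the decoder asks it for the bit depth; at creation time (before any bitstream has been parsed) it falls back to the HDR/VP9.2 heuristic, and the DXGI output format follows from the bit depth. The sketch below is illustrative only; ChooseBitDepth and ChooseOutputFormat are hypothetical helper names, not functions in the tree, and the real logic lives inline in D3D11VideoDecoder::CreateD3D11Decoder() and D3D11DecoderConfigurator::Create() as shown in the diff.

// Illustrative sketch only; helper names are made up for this summary.
#include <d3d11.h>  // DXGI_FORMAT

#include "media/base/video_codecs.h"
#include "media/base/video_decoder_config.h"
#include "media/gpu/accelerated_video_decoder.h"

uint8_t ChooseBitDepth(media::AcceleratedVideoDecoder* accelerator,
                       const media::VideoDecoderConfig& config) {
  // Once the accelerator exists, trust the bit depth parsed from the stream.
  if (accelerator)
    return accelerator->GetBitDepth();
  // Before the first decode, assume HDR (and VP9 profile 2) content is 10-bit.
  return (config.profile() == media::VP9PROFILE_PROFILE2 ||
          config.color_space_info().ToGfxColorSpace().IsHDR())
             ? 10
             : 8;
}

DXGI_FORMAT ChooseOutputFormat(uint8_t bit_depth) {
  // 8-bit output decodes to NV12; deeper content uses P010.
  return bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
}
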
@@ -36,23 +36,18 @@ std::unique_ptr<D3D11DecoderConfigurator> D3D11DecoderConfigurator::Create(
     const gpu::GpuPreferences& gpu_preferences,
     const gpu::GpuDriverBugWorkarounds& workarounds,
     const VideoDecoderConfig& config,
+    uint8_t bit_depth,
     MediaLog* media_log) {
-  bool supports_nv12_decode_swap_chain =
+  const bool supports_nv12_decode_swap_chain =
       gl::DirectCompositionSurfaceWin::IsDecodeSwapChainSupported();
-
-  // Note: All profiles of AV1 can contain 8 or 10 bit content. The professional
-  // profile may contain up to 12-bits. Eventually we will need a way to defer
-  // the output format selection until we've parsed the bitstream.
-  DXGI_FORMAT decoder_dxgi_format =
-      config.color_space_info().ToGfxColorSpace().IsHDR() ? DXGI_FORMAT_P010
-                                                          : DXGI_FORMAT_NV12;
+  const auto decoder_dxgi_format =
+      bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
   GUID decoder_guid = {};
   if (config.codec() == kCodecH264) {
     decoder_guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
   } else if (config.profile() == VP9PROFILE_PROFILE0) {
     decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0;
   } else if (config.profile() == VP9PROFILE_PROFILE2) {
-    decoder_dxgi_format = DXGI_FORMAT_P010;  // Profile2 is always 10-bit.
     decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2;
   } else if (config.profile() == AV1PROFILE_PROFILE_MAIN) {
     decoder_guid = DXVA_ModeAV1_VLD_Profile0;
...
@@ -36,6 +36,7 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
       const gpu::GpuPreferences& gpu_preferences,
       const gpu::GpuDriverBugWorkarounds& workarounds,
       const VideoDecoderConfig& config,
+      uint8_t bit_depth,
       MediaLog* media_log);

   bool SupportsDevice(ComD3D11VideoDevice video_device);
...
@@ -6,6 +6,7 @@
 #include "media/base/media_util.h"
 #include "media/base/win/d3d11_mocks.h"
+#include "media/gpu/windows/av1_guids.h"
 #include "media/gpu/windows/d3d11_decoder_configurator.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
@@ -35,14 +36,15 @@ class D3D11DecoderConfiguratorUnittest : public ::testing::Test {
   std::unique_ptr<D3D11DecoderConfigurator> CreateWithDefaultGPUInfo(
       const VideoDecoderConfig& config,
-      bool zero_copy_enabled = true) {
+      bool zero_copy_enabled = true,
+      uint8_t bit_depth = 8) {
     gpu::GpuPreferences prefs;
     prefs.enable_zero_copy_dxgi_video = zero_copy_enabled;
     gpu::GpuDriverBugWorkarounds workarounds;
     workarounds.disable_dxgi_zero_copy_video = false;
     auto media_log = std::make_unique<NullMediaLog>();
     return D3D11DecoderConfigurator::Create(prefs, workarounds, config,
-                                            media_log.get());
+                                            bit_depth, media_log.get());
   }
 };
@@ -57,13 +59,25 @@ TEST_F(D3D11DecoderConfiguratorUnittest, VP9Profile0RightFormats) {

 TEST_F(D3D11DecoderConfiguratorUnittest, VP9Profile2RightFormats) {
   auto configurator = CreateWithDefaultGPUInfo(
-      CreateDecoderConfig(VP9PROFILE_PROFILE2, {0, 0}, false), false);
+      CreateDecoderConfig(VP9PROFILE_PROFILE2, {0, 0}, false), false, 10);

   EXPECT_EQ(configurator->DecoderGuid(),
             D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2);
   EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_P010);
 }

+TEST_F(D3D11DecoderConfiguratorUnittest, AV1ProfileRightFormats) {
+  auto configurator = CreateWithDefaultGPUInfo(
+      CreateDecoderConfig(AV1PROFILE_PROFILE_MAIN, {0, 0}, false), false, 8);
+  EXPECT_EQ(configurator->DecoderGuid(), DXVA_ModeAV1_VLD_Profile0);
+  EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_NV12);
+
+  configurator = CreateWithDefaultGPUInfo(
+      CreateDecoderConfig(AV1PROFILE_PROFILE_MAIN, {0, 0}, false), false, 10);
+  EXPECT_EQ(configurator->DecoderGuid(), DXVA_ModeAV1_VLD_Profile0);
+  EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_P010);
+}
+
 TEST_F(D3D11DecoderConfiguratorUnittest, SupportsDeviceNoProfiles) {
   auto configurator = CreateWithDefaultGPUInfo(
       CreateDecoderConfig(VP9PROFILE_PROFILE0, {0, 0}, false));
...
@@ -195,12 +195,22 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(

 StatusOr<std::tuple<ComD3D11VideoDecoder>>
 D3D11VideoDecoder::CreateD3D11Decoder() {
-  HRESULT hr;
+  // By default we assume outputs are 8-bit for SDR color spaces and 10 bit for
+  // HDR color spaces (or VP9.2). We'll get a config change once we know the
+  // real bit depth if this turns out to be wrong.
+  bit_depth_ =
+      accelerated_video_decoder_
+          ? accelerated_video_decoder_->GetBitDepth()
+          : (config_.profile() == VP9PROFILE_PROFILE2 ||
+                     config_.color_space_info().ToGfxColorSpace().IsHDR()
+                 ? 10
+                 : 8);

   // TODO: supported check?
-  decoder_configurator_ = D3D11DecoderConfigurator::Create(
-      gpu_preferences_, gpu_workarounds_, config_, media_log_.get());
+  decoder_configurator_ =
+      D3D11DecoderConfigurator::Create(gpu_preferences_, gpu_workarounds_,
+                                       config_, bit_depth_, media_log_.get());
   if (!decoder_configurator_)
     return StatusCode::kDecoderUnsupportedProfile;
@@ -226,7 +236,7 @@ D3D11VideoDecoder::CreateD3D11Decoder() {
     return StatusCode::kCreateTextureSelectorFailed;

   UINT config_count = 0;
-  hr = video_device_->GetVideoDecoderConfigCount(
+  auto hr = video_device_->GetVideoDecoderConfigCount(
       decoder_configurator_->DecoderDescriptor(), &config_count);
   if (FAILED(hr)) {
     return Status(StatusCode::kGetDecoderConfigCountFailed)
@@ -600,6 +610,7 @@ void D3D11VideoDecoder::DoDecode() {
       // Otherwise, stop here. We'll restart when a picture comes back.
       if (picture_buffers_.size())
         return;
+
       CreatePictureBuffers();
     } else if (result == media::AcceleratedVideoDecoder::kConfigChange) {
       // Before the first frame, we get a config change that we should ignore.
@@ -608,10 +619,12 @@ void D3D11VideoDecoder::DoDecode() {
      // don't, so that init can fail rather than decoding if there's a problem
      // creating it. We could also unconditionally re-allocate the decoder,
      // but we keep it if it's ready to go.
+      const auto new_bit_depth = accelerated_video_decoder_->GetBitDepth();
       const auto new_profile = accelerated_video_decoder_->GetProfile();
       const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
       if (new_profile == config_.profile() &&
-          new_coded_size == config_.coded_size()) {
+          new_coded_size == config_.coded_size() &&
+          new_bit_depth == bit_depth_) {
         continue;
       }
...
@@ -300,6 +300,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
   // accelerator. Needed for config changes.
   SetAcceleratorDecoderCB set_accelerator_decoder_cb_;

+  // The currently configured bit depth for the decoder. When this changes we
+  // need to recreate the decoder.
+  uint8_t bit_depth_ = 8u;
+
   base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_{this};

   DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoder);
...