Commit 2f702ab1 authored by Chunbo Hua, committed by Commit Bot

Enable H.264 hardware MFT encoder on Windows

Currently Microsoft H.264 encoder MFT is always used as WebRTC external
encoder without hardware acceleration. This change enables H.264
hardware MFT encoder on Windows.

Bug: 982799
Change-Id: Ia33812508034daa99dd3dc1fb64b83cb1d7c8465
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1777521
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Tommi <tommi@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#745539}
parent 7a742ea7
...@@ -187,6 +187,7 @@ Chris Tserng <tserng@amazon.com> ...@@ -187,6 +187,7 @@ Chris Tserng <tserng@amazon.com>
Chris Vasselli <clindsay@gmail.com> Chris Vasselli <clindsay@gmail.com>
Christophe Dumez <ch.dumez@samsung.com> Christophe Dumez <ch.dumez@samsung.com>
Christopher Dale <chrelad@gmail.com> Christopher Dale <chrelad@gmail.com>
Chunbo Hua <chunbo.hua@intel.com>
Claudio DeSouza <claudiomdsjr@gmail.com> Claudio DeSouza <claudiomdsjr@gmail.com>
Clemens Fruhwirth <clemens@endorphin.org> Clemens Fruhwirth <clemens@endorphin.org>
Clement Scheelfeldt Skau <clementskau@gmail.com> Clement Scheelfeldt Skau <clementskau@gmail.com>
......
...@@ -40,9 +40,8 @@ const size_t kMaxResolutionWidth = 1920; ...@@ -40,9 +40,8 @@ const size_t kMaxResolutionWidth = 1920;
const size_t kMaxResolutionHeight = 1088; const size_t kMaxResolutionHeight = 1088;
const size_t kNumInputBuffers = 3; const size_t kNumInputBuffers = 3;
// Media Foundation uses 100 nanosecond units for time, see // Media Foundation uses 100 nanosecond units for time, see
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms697282(v=vs.85).aspx // https://msdn.microsoft.com/en-us/library/windows/desktop/ms697282(v=vs.85).aspx.
const size_t kOneMicrosecondInMFSampleTimeUnits = 10; const size_t kOneMicrosecondInMFSampleTimeUnits = 10;
const size_t kOutputSampleBufferSizeRatio = 4;
constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = { constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
L"mf.dll", L"mfplat.dll", L"mf.dll", L"mfplat.dll",
...@@ -108,8 +107,10 @@ struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef { ...@@ -108,8 +107,10 @@ struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator( MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator(
bool compatible_with_win7) bool compatible_with_win7)
: compatible_with_win7_(compatible_with_win7), : compatible_with_win7_(compatible_with_win7),
input_required_(false),
main_client_task_runner_(base::ThreadTaskRunnerHandle::Get()), main_client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
encoder_thread_("MFEncoderThread") {} encoder_thread_("MFEncoderThread"),
encoder_task_weak_factory_(this) {}
MediaFoundationVideoEncodeAccelerator:: MediaFoundationVideoEncodeAccelerator::
~MediaFoundationVideoEncodeAccelerator() { ~MediaFoundationVideoEncodeAccelerator() {
...@@ -132,18 +133,20 @@ MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() { ...@@ -132,18 +133,20 @@ MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator; frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
input_visible_size_ = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight); input_visible_size_ = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
if (!CreateHardwareEncoderMFT() || !SetEncoderModes() || if (!CreateHardwareEncoderMFT() || !SetEncoderModes() ||
!InitializeInputOutputSamples(H264PROFILE_BASELINE)) { !InitializeInputOutputParameters(H264PROFILE_BASELINE)) {
ReleaseEncoderResources(); ReleaseEncoderResources();
DVLOG(1) DVLOG(1)
<< "Hardware encode acceleration is not available on this platform."; << "Hardware encode acceleration is not available on this platform.";
return profiles; return profiles;
} }
gfx::Size highest_supported_resolution = input_visible_size_; gfx::Size highest_supported_resolution = input_visible_size_;
for (const auto& resolution : kOptionalMaxResolutions) { for (const auto& resolution : kOptionalMaxResolutions) {
DCHECK_GT(resolution.GetArea(), highest_supported_resolution.GetArea()); DCHECK_GT(resolution.GetArea(), highest_supported_resolution.GetArea());
if (!IsResolutionSupported(resolution)) if (!IsResolutionSupported(resolution)) {
break; break;
}
highest_supported_resolution = resolution; highest_supported_resolution = resolution;
} }
ReleaseEncoderResources(); ReleaseEncoderResources();
...@@ -200,27 +203,13 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config, ...@@ -200,27 +203,13 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator; frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
target_bitrate_ = config.initial_bitrate; target_bitrate_ = config.initial_bitrate;
bitstream_buffer_size_ = config.input_visible_size.GetArea(); bitstream_buffer_size_ = config.input_visible_size.GetArea();
u_plane_offset_ =
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane,
input_visible_size_)
.GetArea();
v_plane_offset_ = u_plane_offset_ + VideoFrame::PlaneSize(PIXEL_FORMAT_I420,
VideoFrame::kUPlane,
input_visible_size_)
.GetArea();
y_stride_ = VideoFrame::RowBytes(VideoFrame::kYPlane, PIXEL_FORMAT_I420,
input_visible_size_.width());
u_stride_ = VideoFrame::RowBytes(VideoFrame::kUPlane, PIXEL_FORMAT_I420,
input_visible_size_.width());
v_stride_ = VideoFrame::RowBytes(VideoFrame::kVPlane, PIXEL_FORMAT_I420,
input_visible_size_.width());
if (!SetEncoderModes()) { if (!SetEncoderModes()) {
DLOG(ERROR) << "Failed setting encoder parameters."; DLOG(ERROR) << "Failed setting encoder parameters.";
return false; return false;
} }
if (!InitializeInputOutputSamples(config.output_profile)) { if (!InitializeInputOutputParameters(config.output_profile)) {
DLOG(ERROR) << "Failed initializing input-output samples."; DLOG(ERROR) << "Failed initializing input-output samples.";
return false; return false;
} }
...@@ -232,25 +221,28 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config, ...@@ -232,25 +221,28 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
input_sample_ = CreateEmptySampleWithBuffer( input_sample_ = CreateEmptySampleWithBuffer(
input_stream_info.cbSize input_stream_info.cbSize
? input_stream_info.cbSize ? input_stream_info.cbSize
: VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size_), : VideoFrame::AllocationSize(PIXEL_FORMAT_NV12, input_visible_size_),
input_stream_info.cbAlignment); input_stream_info.cbAlignment);
MFT_OUTPUT_STREAM_INFO output_stream_info;
hr = encoder_->GetOutputStreamInfo(output_stream_id_, &output_stream_info);
RETURN_ON_HR_FAILURE(hr, "Couldn't get output stream info", false);
output_sample_ = CreateEmptySampleWithBuffer(
output_stream_info.cbSize
? output_stream_info.cbSize
: bitstream_buffer_size_ * kOutputSampleBufferSizeRatio,
output_stream_info.cbAlignment);
hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL);
RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false);
main_client_task_runner_->PostTask( main_client_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&Client::RequireBitstreamBuffers, main_client_, FROM_HERE, base::BindOnce(&Client::RequireBitstreamBuffers, main_client_,
kNumInputBuffers, input_visible_size_, kNumInputBuffers, input_visible_size_,
bitstream_buffer_size_)); bitstream_buffer_size_));
hr = encoder_->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
RETURN_ON_HR_FAILURE(
hr, "Couldn't set ProcessMessage MFT_MESSAGE_COMMAND_FLUSH", false);
hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
RETURN_ON_HR_FAILURE(
hr, "Couldn't set ProcessMessage MFT_MESSAGE_NOTIFY_BEGIN_STREAMING",
false);
hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
RETURN_ON_HR_FAILURE(
hr, "Couldn't set ProcessMessage MFT_MESSAGE_NOTIFY_START_OF_STREAM",
false);
hr = encoder_->QueryInterface(IID_PPV_ARGS(&event_generator_));
RETURN_ON_HR_FAILURE(hr, "Couldn't get event generator", false);
return SUCCEEDED(hr); return SUCCEEDED(hr);
} }
...@@ -359,8 +351,9 @@ bool MediaFoundationVideoEncodeAccelerator::CreateHardwareEncoderMFT() { ...@@ -359,8 +351,9 @@ bool MediaFoundationVideoEncodeAccelerator::CreateHardwareEncoderMFT() {
} }
} }
if (!(session_ = InitializeMediaFoundation())) if (!(session_ = InitializeMediaFoundation())) {
return false; return false;
}
uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER; uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER;
MFT_REGISTER_TYPE_INFO input_info; MFT_REGISTER_TYPE_INFO input_info;
...@@ -370,32 +363,97 @@ bool MediaFoundationVideoEncodeAccelerator::CreateHardwareEncoderMFT() { ...@@ -370,32 +363,97 @@ bool MediaFoundationVideoEncodeAccelerator::CreateHardwareEncoderMFT() {
output_info.guidMajorType = MFMediaType_Video; output_info.guidMajorType = MFMediaType_Video;
output_info.guidSubtype = MFVideoFormat_H264; output_info.guidSubtype = MFVideoFormat_H264;
base::win::ScopedCoMem<CLSID> CLSIDs;
uint32_t count = 0; uint32_t count = 0;
HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info, base::win::ScopedCoMem<IMFActivate*> pp_activate;
&output_info, NULL, &CLSIDs, &count); HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info,
&output_info, &pp_activate, &count);
RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false);
RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); RETURN_ON_FAILURE((count > 0), "No hardware encoder found", false);
DVLOG(3) << "HW encoder(s) found: " << count; DVLOG(3) << "Hardware encoder(s) found: " << count;
hr = ::CoCreateInstance(CLSIDs[0], nullptr, CLSCTX_ALL,
IID_PPV_ARGS(&encoder_)); // Try to create the encoder with priority according to merit value.
hr = E_FAIL;
for (UINT32 i = 0; i < count; i++) {
if (FAILED(hr)) {
DCHECK(!encoder_);
DCHECK(!activate_);
hr = pp_activate[i]->ActivateObject(IID_PPV_ARGS(&encoder_));
if (encoder_.Get() != nullptr) {
DCHECK(SUCCEEDED(hr));
activate_ = pp_activate[i];
pp_activate[i] = nullptr;
// Print the friendly name.
base::win::ScopedCoMem<WCHAR> friendly_name;
UINT32 name_length;
activate_->GetAllocatedString(MFT_FRIENDLY_NAME_Attribute,
&friendly_name, &name_length);
DVLOG(3) << "Selected hardware encoder's friendly name: "
<< friendly_name;
} else {
DCHECK(FAILED(hr));
// The component that calls ActivateObject is
// responsible for calling ShutdownObject,
// https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nf-mfobjects-imfactivate-shutdownobject.
pp_activate[i]->ShutdownObject();
}
}
// Release the enumerated instances. According to Windows Dev Center,
// https://docs.microsoft.com/en-us/windows/win32/api/mfapi/nf-mfapi-mftenumex
// The caller must release the pointers.
if (pp_activate[i]) {
pp_activate[i]->Release();
pp_activate[i] = nullptr;
}
}
RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false);
RETURN_ON_FAILURE((encoder_.Get() != nullptr), RETURN_ON_FAILURE((encoder_.Get() != nullptr),
"No HW encoder instance created", false); "No hardware encoder instance created", false);
Microsoft::WRL::ComPtr<IMFAttributes> all_attributes;
hr = encoder_->GetAttributes(&all_attributes);
if (SUCCEEDED(hr)) {
// An asynchronous MFT must support dynamic format changes,
// https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#format-changes.
UINT32 dynamic = FALSE;
hr = all_attributes->GetUINT32(MFT_SUPPORT_DYNAMIC_FORMAT_CHANGE, &dynamic);
if (!dynamic) {
DLOG(ERROR) << "Couldn't support dynamic format change.";
return false;
}
// Unlock the selected asynchronous MFTs,
// https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#unlocking-asynchronous-mfts.
UINT32 async = FALSE;
hr = all_attributes->GetUINT32(MF_TRANSFORM_ASYNC, &async);
if (!async) {
DLOG(ERROR) << "MFT encoder is not asynchronous.";
return false;
}
hr = all_attributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
RETURN_ON_HR_FAILURE(hr, "Couldn't unlock transform async", false);
}
return true; return true;
} }
bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples( bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputParameters(
VideoCodecProfile output_profile) { VideoCodecProfile output_profile) {
DCHECK(main_client_task_runner_->BelongsToCurrentThread()); DCHECK(main_client_task_runner_->BelongsToCurrentThread());
DCHECK(encoder_);
DWORD input_count = 0; DWORD input_count = 0;
DWORD output_count = 0; DWORD output_count = 0;
HRESULT hr = encoder_->GetStreamCount(&input_count, &output_count); HRESULT hr = encoder_->GetStreamCount(&input_count, &output_count);
RETURN_ON_HR_FAILURE(hr, "Couldn't get stream count", false); RETURN_ON_HR_FAILURE(hr, "Couldn't get stream count", false);
if (input_count < 1 || output_count < 1) { if (input_count < 1 || output_count < 1) {
LOG(ERROR) << "Stream count too few: input " << input_count << ", output " DLOG(ERROR) << "Stream count too few: input " << input_count << ", output "
<< output_count; << output_count;
return false; return false;
} }
...@@ -410,13 +468,13 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples( ...@@ -410,13 +468,13 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples(
input_stream_id_ = 0; input_stream_id_ = 0;
output_stream_id_ = 0; output_stream_id_ = 0;
} else { } else {
LOG(ERROR) << "Couldn't find stream ids."; DLOG(ERROR) << "Couldn't find stream ids from hardware encoder.";
return false; return false;
} }
// Initialize output parameters. // Initialize output parameters.
hr = MFCreateMediaType(&imf_output_media_type_); hr = MFCreateMediaType(&imf_output_media_type_);
RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); RETURN_ON_HR_FAILURE(hr, "Couldn't create output media type", false);
hr = imf_output_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); hr = imf_output_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
hr = imf_output_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); hr = imf_output_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
...@@ -442,10 +500,10 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples( ...@@ -442,10 +500,10 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples(
// Initialize input parameters. // Initialize input parameters.
hr = MFCreateMediaType(&imf_input_media_type_); hr = MFCreateMediaType(&imf_input_media_type_);
RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); RETURN_ON_HR_FAILURE(hr, "Couldn't create input media type", false);
hr = imf_input_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); hr = imf_input_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false); RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
hr = MFSetAttributeRatio(imf_input_media_type_.Get(), MF_MT_FRAME_RATE, hr = MFSetAttributeRatio(imf_input_media_type_.Get(), MF_MT_FRAME_RATE,
frame_rate_, 1); frame_rate_, 1);
...@@ -465,11 +523,11 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples( ...@@ -465,11 +523,11 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples(
bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() { bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
DCHECK(main_client_task_runner_->BelongsToCurrentThread()); DCHECK(main_client_task_runner_->BelongsToCurrentThread());
RETURN_ON_FAILURE((encoder_.Get() != nullptr), DCHECK(encoder_);
"No HW encoder instance created", false);
HRESULT hr = encoder_.As(&codec_api_); HRESULT hr = encoder_.As(&codec_api_);
RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false); RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
VARIANT var; VARIANT var;
var.vt = VT_UI4; var.vt = VT_UI4;
var.ulVal = eAVEncCommonRateControlMode_CBR; var.ulVal = eAVEncCommonRateControlMode_CBR;
...@@ -481,21 +539,37 @@ bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() { ...@@ -481,21 +539,37 @@ bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
// setting it on Windows 7 returns error. // setting it on Windows 7 returns error.
RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false); RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
} }
if (S_OK ==
codec_api_->IsModifiable(&CODECAPI_AVEncVideoTemporalLayerCount)) {
var.ulVal = 1;
hr = codec_api_->SetValue(&CODECAPI_AVEncVideoTemporalLayerCount, &var);
if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set temporal layer count", false);
}
}
var.ulVal = target_bitrate_; var.ulVal = target_bitrate_;
hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
if (!compatible_with_win7_) { if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false); RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
} }
var.ulVal = eAVEncAdaptiveMode_Resolution;
hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var); if (S_OK == codec_api_->IsModifiable(&CODECAPI_AVEncAdaptiveMode)) {
if (!compatible_with_win7_) { var.ulVal = eAVEncAdaptiveMode_Resolution;
RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false); hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set adaptive mode", false);
}
} }
var.vt = VT_BOOL;
var.boolVal = VARIANT_TRUE; if (S_OK == codec_api_->IsModifiable(&CODECAPI_AVLowLatencyMode)) {
hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var); var.vt = VT_BOOL;
if (!compatible_with_win7_) { var.boolVal = VARIANT_TRUE;
RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false); hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set low latency mode", false);
}
} }
return true; return true;
...@@ -538,27 +612,79 @@ void MediaFoundationVideoEncodeAccelerator::EncodeTask( ...@@ -538,27 +612,79 @@ void MediaFoundationVideoEncodeAccelerator::EncodeTask(
DVLOG(3) << __func__; DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
bool input_delivered = false;
if (input_required_) {
// HMFT is waiting for this coming input.
ProcessInput(frame, force_keyframe);
input_delivered = true;
input_required_ = false;
} else {
Microsoft::WRL::ComPtr<IMFMediaEvent> media_event;
HRESULT hr =
event_generator_->GetEvent(MF_EVENT_FLAG_NO_WAIT, &media_event);
if (FAILED(hr)) {
DLOG(WARNING) << "Abandoned input frame for video encoder.";
return;
}
MediaEventType event_type;
hr = media_event->GetType(&event_type);
if (FAILED(hr)) {
DLOG(ERROR) << "Failed to get the type of media event.";
return;
}
// Always deliver the current input into HMFT.
if (event_type == METransformNeedInput) {
ProcessInput(frame, force_keyframe);
input_delivered = true;
} else if (event_type == METransformHaveOutput) {
ProcessOutput();
input_delivered =
TryToDeliverInputFrame(std::move(frame), force_keyframe);
}
}
if (!input_delivered) {
DLOG(ERROR) << "Failed to deliver input frame to video encoder";
return;
}
TryToReturnBitstreamBuffer();
}
void MediaFoundationVideoEncodeAccelerator::ProcessInput(
scoped_refptr<VideoFrame> frame,
bool force_keyframe) {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
// Convert I420 to NV12 as input.
Microsoft::WRL::ComPtr<IMFMediaBuffer> input_buffer; Microsoft::WRL::ComPtr<IMFMediaBuffer> input_buffer;
input_sample_->GetBufferByIndex(0, &input_buffer); input_sample_->GetBufferByIndex(0, &input_buffer);
{ {
MediaBufferScopedPointer scoped_buffer(input_buffer.Get()); MediaBufferScopedPointer scoped_buffer(input_buffer.Get());
DCHECK(scoped_buffer.get()); DCHECK(scoped_buffer.get());
libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane), int dst_stride_y = frame->stride(VideoFrame::kYPlane);
frame->stride(VideoFrame::kYPlane), uint8_t* dst_uv =
frame->visible_data(VideoFrame::kVPlane), scoped_buffer.get() +
frame->stride(VideoFrame::kVPlane), frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane);
frame->visible_data(VideoFrame::kUPlane), int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
frame->stride(VideoFrame::kUPlane), scoped_buffer.get(), libyuv::I420ToNV12(frame->visible_data(VideoFrame::kYPlane),
y_stride_, scoped_buffer.get() + u_plane_offset_, frame->stride(VideoFrame::kYPlane),
u_stride_, scoped_buffer.get() + v_plane_offset_, frame->visible_data(VideoFrame::kUPlane),
v_stride_, input_visible_size_.width(), frame->stride(VideoFrame::kUPlane),
input_visible_size_.height()); frame->visible_data(VideoFrame::kVPlane),
frame->stride(VideoFrame::kVPlane), scoped_buffer.get(),
dst_stride_y, dst_uv, dst_stride_uv,
input_visible_size_.width(),
input_visible_size_.height());
} }
input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() *
kOneMicrosecondInMFSampleTimeUnits); kOneMicrosecondInMFSampleTimeUnits);
UINT64 sample_duration = 1; UINT64 sample_duration = 0;
HRESULT hr = HRESULT hr =
MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration); MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration);
RETURN_ON_HR_FAILURE(hr, "Couldn't calculate sample duration", ); RETURN_ON_HR_FAILURE(hr, "Couldn't calculate sample duration", );
...@@ -572,80 +698,70 @@ void MediaFoundationVideoEncodeAccelerator::EncodeTask( ...@@ -572,80 +698,70 @@ void MediaFoundationVideoEncodeAccelerator::EncodeTask(
var.vt = VT_UI4; var.vt = VT_UI4;
var.ulVal = 1; var.ulVal = 1;
hr = codec_api_->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var); hr = codec_api_->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var);
if (!compatible_with_win7_ && !SUCCEEDED(hr)) { if (!compatible_with_win7_ && FAILED(hr)) {
LOG(WARNING) << "Failed to set CODECAPI_AVEncVideoForceKeyFrame, " LOG(WARNING) << "Failed to set CODECAPI_AVEncVideoForceKeyFrame, "
"HRESULT: 0x" << std::hex << hr; "HRESULT: 0x" << std::hex << hr;
} }
} }
hr = encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0); hr = encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0);
// According to MSDN, if encoder returns MF_E_NOTACCEPTING, we need to try if (FAILED(hr)) {
// processing the output. This error indicates that encoder does not accept
// any more input data.
if (hr == MF_E_NOTACCEPTING) {
DVLOG(3) << "MF_E_NOTACCEPTING";
ProcessOutput();
hr = encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0);
if (!SUCCEEDED(hr)) {
NotifyError(kPlatformFailureError);
RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
}
} else if (!SUCCEEDED(hr)) {
NotifyError(kPlatformFailureError); NotifyError(kPlatformFailureError);
RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
} }
DVLOG(3) << "Sent for encode " << hr;
ProcessOutput(); DVLOG(3) << "Sent for encode " << hr;
} }
void MediaFoundationVideoEncodeAccelerator::ProcessOutput() { void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
DVLOG(3) << __func__; DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DWORD output_status = 0;
HRESULT hr = encoder_->GetOutputStatus(&output_status);
RETURN_ON_HR_FAILURE(hr, "Couldn't get output status", );
if (output_status != MFT_OUTPUT_STATUS_SAMPLE_READY) {
DVLOG(3) << "Output isnt ready";
return;
}
MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
output_data_buffer.dwStreamID = 0; output_data_buffer.dwStreamID = output_stream_id_;
output_data_buffer.dwStatus = 0; output_data_buffer.dwStatus = 0;
output_data_buffer.pEvents = NULL; output_data_buffer.pEvents = nullptr;
output_data_buffer.pSample = output_sample_.Get(); output_data_buffer.pSample = nullptr;
DWORD status = 0; DWORD status = 0;
hr = encoder_->ProcessOutput(output_stream_id_, 1, &output_data_buffer, HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
&status); if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { hr = S_OK;
DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT" << status; Microsoft::WRL::ComPtr<IMFMediaType> media_type;
for (DWORD type_index = 0; SUCCEEDED(hr); ++type_index) {
hr = encoder_->GetOutputAvailableType(output_stream_id_, type_index,
&media_type);
if (SUCCEEDED(hr)) {
break;
}
}
hr = encoder_->SetOutputType(output_stream_id_, media_type.Get(), 0);
return; return;
} }
RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", ); RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
DVLOG(3) << "Got encoded data " << hr; DVLOG(3) << "Got encoded data " << hr;
Microsoft::WRL::ComPtr<IMFMediaBuffer> output_buffer; Microsoft::WRL::ComPtr<IMFMediaBuffer> output_buffer;
hr = output_sample_->GetBufferByIndex(0, &output_buffer); hr = output_data_buffer.pSample->GetBufferByIndex(0, &output_buffer);
RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", ); RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", );
DWORD size = 0; DWORD size = 0;
hr = output_buffer->GetCurrentLength(&size); hr = output_buffer->GetCurrentLength(&size);
RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", ); RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", );
base::TimeDelta timestamp; base::TimeDelta timestamp;
LONGLONG sample_time; LONGLONG sample_time;
hr = output_sample_->GetSampleTime(&sample_time); hr = output_data_buffer.pSample->GetSampleTime(&sample_time);
if (SUCCEEDED(hr)) { if (SUCCEEDED(hr)) {
timestamp = base::TimeDelta::FromMicroseconds( timestamp = base::TimeDelta::FromMicroseconds(
sample_time / kOneMicrosecondInMFSampleTimeUnits); sample_time / kOneMicrosecondInMFSampleTimeUnits);
} }
const bool keyframe = MFGetAttributeUINT32( const bool keyframe = MFGetAttributeUINT32(
output_sample_.Get(), MFSampleExtension_CleanPoint, false); output_data_buffer.pSample, MFSampleExtension_CleanPoint, false);
DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe " DVLOG(3) << "Encoded data with size:" << size << " keyframe " << keyframe;
<< keyframe;
// If no bit stream buffer presents, queue the output first.
if (bitstream_buffer_queue_.empty()) { if (bitstream_buffer_queue_.empty()) {
DVLOG(3) << "No bitstream buffers."; DVLOG(3) << "No bitstream buffers.";
// We need to copy the output so that encoding can continue. // We need to copy the output so that encoding can continue.
...@@ -656,9 +772,12 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutput() { ...@@ -656,9 +772,12 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
memcpy(encode_output->memory(), scoped_buffer.get(), size); memcpy(encode_output->memory(), scoped_buffer.get(), size);
} }
encoder_output_queue_.push_back(std::move(encode_output)); encoder_output_queue_.push_back(std::move(encode_output));
output_data_buffer.pSample->Release();
output_data_buffer.pSample = nullptr;
return; return;
} }
// Immediately return encoded buffer with BitstreamBuffer to client.
std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef> std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
buffer_ref = std::move(bitstream_buffer_queue_.front()); buffer_ref = std::move(bitstream_buffer_queue_.front());
bitstream_buffer_queue_.pop_front(); bitstream_buffer_queue_.pop_front();
...@@ -668,15 +787,88 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutput() { ...@@ -668,15 +787,88 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
memcpy(buffer_ref->mapping.memory(), scoped_buffer.get(), size); memcpy(buffer_ref->mapping.memory(), scoped_buffer.get(), size);
} }
output_data_buffer.pSample->Release();
output_data_buffer.pSample = nullptr;
main_client_task_runner_->PostTask( main_client_task_runner_->PostTask(
FROM_HERE, FROM_HERE,
base::BindOnce(&Client::BitstreamBufferReady, main_client_, base::BindOnce(&Client::BitstreamBufferReady, main_client_,
buffer_ref->id, buffer_ref->id,
BitstreamBufferMetadata(size, keyframe, timestamp))); BitstreamBufferMetadata(size, keyframe, timestamp)));
}
bool MediaFoundationVideoEncodeAccelerator::TryToDeliverInputFrame(
scoped_refptr<VideoFrame> frame,
bool force_keyframe) {
bool input_delivered = false;
Microsoft::WRL::ComPtr<IMFMediaEvent> media_event;
MediaEventType event_type;
do {
HRESULT hr =
event_generator_->GetEvent(MF_EVENT_FLAG_NO_WAIT, &media_event);
if (FAILED(hr)) {
break;
}
hr = media_event->GetType(&event_type);
if (FAILED(hr)) {
DLOG(ERROR) << "Failed to get the type of media event.";
break;
}
switch (event_type) {
case METransformHaveOutput: {
ProcessOutput();
continue;
}
case METransformNeedInput: {
ProcessInput(frame, force_keyframe);
return true;
}
default:
break;
}
} while (true);
return input_delivered;
}
void MediaFoundationVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
// Try to fetch the encoded frame in time.
bool output_processed = false;
do {
Microsoft::WRL::ComPtr<IMFMediaEvent> media_event;
MediaEventType event_type;
HRESULT hr =
event_generator_->GetEvent(MF_EVENT_FLAG_NO_WAIT, &media_event);
if (FAILED(hr)) {
if (!output_processed) {
continue;
} else {
break;
}
}
// Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT hr = media_event->GetType(&event_type);
// is returned to flush out all the output. if (FAILED(hr)) {
ProcessOutput(); DLOG(ERROR) << "Failed to get the type of media event.";
break;
}
switch (event_type) {
case METransformHaveOutput: {
ProcessOutput();
output_processed = true;
break;
}
case METransformNeedInput: {
input_required_ = true;
continue;
}
default:
break;
}
} while (true);
} }
void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask( void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
...@@ -689,31 +881,22 @@ void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask( ...@@ -689,31 +881,22 @@ void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput> std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput>
encode_output = std::move(encoder_output_queue_.front()); encode_output = std::move(encoder_output_queue_.front());
encoder_output_queue_.pop_front(); encoder_output_queue_.pop_front();
ReturnBitstreamBuffer(std::move(encode_output), std::move(buffer_ref)); memcpy(buffer_ref->mapping.memory(), encode_output->memory(),
encode_output->size());
main_client_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&Client::BitstreamBufferReady, main_client_,
buffer_ref->id,
BitstreamBufferMetadata(
encode_output->size(), encode_output->keyframe,
encode_output->capture_timestamp)));
return; return;
} }
bitstream_buffer_queue_.push_back(std::move(buffer_ref)); bitstream_buffer_queue_.push_back(std::move(buffer_ref));
} }
void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer(
std::unique_ptr<EncodeOutput> encode_output,
std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
buffer_ref) {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
memcpy(buffer_ref->mapping.memory(), encode_output->memory(),
encode_output->size());
main_client_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&Client::BitstreamBufferReady, main_client_,
buffer_ref->id,
BitstreamBufferMetadata(
encode_output->size(), encode_output->keyframe,
encode_output->capture_timestamp)));
}
void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask( void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
uint32_t bitrate, uint32_t bitrate,
uint32_t framerate) { uint32_t framerate) {
...@@ -732,7 +915,7 @@ void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask( ...@@ -732,7 +915,7 @@ void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
var.ulVal = target_bitrate_; var.ulVal = target_bitrate_;
HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
if (!compatible_with_win7_) { if (!compatible_with_win7_) {
RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", ); RETURN_ON_HR_FAILURE(hr, "Couldn't update bitrate", );
} }
} }
} }
...@@ -748,12 +931,22 @@ void MediaFoundationVideoEncodeAccelerator::DestroyTask() { ...@@ -748,12 +931,22 @@ void MediaFoundationVideoEncodeAccelerator::DestroyTask() {
} }
void MediaFoundationVideoEncodeAccelerator::ReleaseEncoderResources() { void MediaFoundationVideoEncodeAccelerator::ReleaseEncoderResources() {
while (!bitstream_buffer_queue_.empty())
bitstream_buffer_queue_.pop_front();
while (!encoder_output_queue_.empty())
encoder_output_queue_.pop_front();
if (activate_.Get() != nullptr) {
activate_->ShutdownObject();
activate_->Release();
activate_.Reset();
}
encoder_.Reset(); encoder_.Reset();
codec_api_.Reset(); codec_api_.Reset();
event_generator_.Reset();
imf_input_media_type_.Reset(); imf_input_media_type_.Reset();
imf_output_media_type_.Reset(); imf_output_media_type_.Reset();
input_sample_.Reset(); input_sample_.Reset();
output_sample_.Reset();
} }
} // namespace content } // namespace media
...@@ -65,8 +65,8 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator ...@@ -65,8 +65,8 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Creates an hardware encoder backed IMFTransform instance on |encoder_|. // Creates an hardware encoder backed IMFTransform instance on |encoder_|.
bool CreateHardwareEncoderMFT(); bool CreateHardwareEncoderMFT();
// Initializes and allocates memory for input and output samples. // Initializes and allocates memory for input and output parameters.
bool InitializeInputOutputSamples(VideoCodecProfile output_profile); bool InitializeInputOutputParameters(VideoCodecProfile output_profile);
// Initializes encoder parameters for real-time use. // Initializes encoder parameters for real-time use.
bool SetEncoderModes(); bool SetEncoderModes();
...@@ -82,20 +82,23 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator ...@@ -82,20 +82,23 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Encoding tasks to be run on |encoder_thread_|. // Encoding tasks to be run on |encoder_thread_|.
void EncodeTask(scoped_refptr<VideoFrame> frame, bool force_keyframe); void EncodeTask(scoped_refptr<VideoFrame> frame, bool force_keyframe);
// Processes the input video frame for the encoder.
void ProcessInput(scoped_refptr<VideoFrame> frame, bool force_keyframe);
// Checks for and copies encoded output on |encoder_thread_|. // Checks for and copies encoded output on |encoder_thread_|.
void ProcessOutput(); void ProcessOutput();
// Tries to deliver the input frame to the encoder.
bool TryToDeliverInputFrame(scoped_refptr<VideoFrame> frame,
bool force_keyframe);
// Tries to return a bitstream buffer to the client.
void TryToReturnBitstreamBuffer();
// Inserts the output buffers for reuse on |encoder_thread_|. // Inserts the output buffers for reuse on |encoder_thread_|.
void UseOutputBitstreamBufferTask( void UseOutputBitstreamBufferTask(
std::unique_ptr<BitstreamBufferRef> buffer_ref); std::unique_ptr<BitstreamBufferRef> buffer_ref);
// Copies EncodeOutput into a BitstreamBuffer and returns it to the
// |main_client_|.
void ReturnBitstreamBuffer(
std::unique_ptr<EncodeOutput> encode_output,
std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
buffer_ref);
// Changes encode parameters on |encoder_thread_|. // Changes encode parameters on |encoder_thread_|.
void RequestEncodingParametersChangeTask(uint32_t bitrate, void RequestEncodingParametersChangeTask(uint32_t bitrate,
uint32_t framerate); uint32_t framerate);
...@@ -119,14 +122,11 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator ...@@ -119,14 +122,11 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
size_t bitstream_buffer_size_; size_t bitstream_buffer_size_;
uint32_t frame_rate_; uint32_t frame_rate_;
uint32_t target_bitrate_; uint32_t target_bitrate_;
size_t u_plane_offset_;
size_t v_plane_offset_;
size_t y_stride_;
size_t u_stride_;
size_t v_stride_;
Microsoft::WRL::ComPtr<IMFActivate> activate_;
Microsoft::WRL::ComPtr<IMFTransform> encoder_; Microsoft::WRL::ComPtr<IMFTransform> encoder_;
Microsoft::WRL::ComPtr<ICodecAPI> codec_api_; Microsoft::WRL::ComPtr<ICodecAPI> codec_api_;
Microsoft::WRL::ComPtr<IMFMediaEventGenerator> event_generator_;
DWORD input_stream_id_; DWORD input_stream_id_;
DWORD output_stream_id_; DWORD output_stream_id_;
...@@ -134,8 +134,8 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator ...@@ -134,8 +134,8 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
Microsoft::WRL::ComPtr<IMFMediaType> imf_input_media_type_; Microsoft::WRL::ComPtr<IMFMediaType> imf_input_media_type_;
Microsoft::WRL::ComPtr<IMFMediaType> imf_output_media_type_; Microsoft::WRL::ComPtr<IMFMediaType> imf_output_media_type_;
bool input_required_;
Microsoft::WRL::ComPtr<IMFSample> input_sample_; Microsoft::WRL::ComPtr<IMFSample> input_sample_;
Microsoft::WRL::ComPtr<IMFSample> output_sample_;
// MediaFoundation session. // MediaFoundation session.
MFSessionLifetime session_; MFSessionLifetime session_;
...@@ -155,7 +155,7 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator ...@@ -155,7 +155,7 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Declared last to ensure that all weak pointers are invalidated before // Declared last to ensure that all weak pointers are invalidated before
// other destructors run. // other destructors run.
base::WeakPtrFactory<MediaFoundationVideoEncodeAccelerator> base::WeakPtrFactory<MediaFoundationVideoEncodeAccelerator>
encoder_task_weak_factory_{this}; encoder_task_weak_factory_;
DISALLOW_COPY_AND_ASSIGN(MediaFoundationVideoEncodeAccelerator); DISALLOW_COPY_AND_ASSIGN(MediaFoundationVideoEncodeAccelerator);
}; };
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment