Commit 26c242a7 authored by Réda Housni Alaoui, committed by Commit Bot

Fix depth camera MediaFoundation video capture failure

Depth camera video capture works with DirectShow but not with
MediaFoundation. This is because IMFCaptureEngine is unable to perform
encoding/decoding of the non-standard MediaFoundation formats Y16, Z16
and INVZ.

To fix this, the formats Y16, Z16 and INVZ are set identically on the
source and on the preview sink, so that IMFCaptureEngine passes these
frames through without transcoding (see the sketch below).

Bug: 730068
Change-Id: I66bd25f9b8796e4ed564df1e3a9867dcbd9f9522
Reviewed-on: https://chromium-review.googlesource.com/918001
Reviewed-by: Aleksandar Stojiljkovic <aleksandar.stojiljkovic@intel.com>
Reviewed-by: Christian Fremerey <chfremer@chromium.org>
Commit-Queue: Christian Fremerey <chfremer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#536908}
parent b423a9eb
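In outline, the patch below replaces the hard-coded I420 sink format with a per-source-subtype mapping: formats IMFCaptureEngine knows are still transcoded to I420, the depth formats are forwarded to the preview sink unchanged, and anything else is rejected. A minimal sketch of that rule follows; ChooseSinkSubtype is a hypothetical helper used only for illustration (the patch itself implements this via a MediaFormatConfiguration lookup table), the kMediaSubType* constants are the depth-format GUIDs defined in the header diff below, and the MFVideoFormat_* constants come from <mfapi.h>.

// Sketch only, not the literal patch.
bool ChooseSinkSubtype(const GUID& source_subtype, GUID* sink_subtype) {
  // Depth formats are unknown to IMFCaptureEngine, so they must be passed
  // through to the sink without transcoding.
  if (source_subtype == kMediaSubTypeY16 || source_subtype == kMediaSubTypeZ16 ||
      source_subtype == kMediaSubTypeINVZ) {
    *sink_subtype = source_subtype;
    return true;
  }
  // Known camera formats keep being transcoded to I420, which is what the
  // rest of the Chromium pipeline consumes.
  if (source_subtype == MFVideoFormat_I420 || source_subtype == MFVideoFormat_YUY2 ||
      source_subtype == MFVideoFormat_NV12 || source_subtype == MFVideoFormat_MJPG) {
    *sink_subtype = MFVideoFormat_I420;
    return true;
  }
  // Anything else stays unsupported.
  return false;
}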
@@ -9,36 +9,6 @@
 namespace media {
-// Define GUID for I420. This is the color format we would like to support but
-// it is not defined in the DirectShow SDK.
-// http://msdn.microsoft.com/en-us/library/dd757532.aspx
-// 30323449-0000-0010-8000-00AA00389B71.
-GUID kMediaSubTypeI420 = {0x30323449,
-                          0x0000,
-                          0x0010,
-                          {0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71}};
-// UYVY synonym with BT709 color components, used in HD video. This variation
-// might appear in non-USB capture cards and it's implemented as a normal YUV
-// pixel format with the characters HDYC encoded in the first array word.
-GUID kMediaSubTypeHDYC = {0x43594448,
-                          0x0000,
-                          0x0010,
-                          {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
-GUID kMediaSubTypeZ16 = {0x2036315a,
-                         0x0000,
-                         0x0010,
-                         {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
-GUID kMediaSubTypeINVZ = {0x5a564e49,
-                          0x2d90,
-                          0x4a58,
-                          {0x92, 0x0b, 0x77, 0x3f, 0x1f, 0x2c, 0x55, 0x6b}};
-GUID kMediaSubTypeY16 = {0x20363159,
-                         0x0000,
-                         0x0010,
-                         {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
 SinkFilterObserver::~SinkFilterObserver() {
 }
...
@@ -24,17 +24,38 @@ namespace media {
 // it is not defined in the DirectShow SDK.
 // http://msdn.microsoft.com/en-us/library/dd757532.aspx
 // 30323449-0000-0010-8000-00AA00389B71.
-extern GUID kMediaSubTypeI420;
+const GUID kMediaSubTypeI420 = {
+    0x30323449,
+    0x0000,
+    0x0010,
+    {0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71}};
 // UYVY synonym with BT709 color components, used in HD video. This variation
 // might appear in non-USB capture cards and it's implemented as a normal YUV
 // pixel format with the characters HDYC encoded in the first array word.
-extern GUID kMediaSubTypeHDYC;
+const GUID kMediaSubTypeHDYC = {
+    0x43594448,
+    0x0000,
+    0x0010,
+    {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
 // 16-bit grey-scale single plane formats provided by some depth cameras.
-extern GUID kMediaSubTypeZ16;
-extern GUID kMediaSubTypeINVZ;
-extern GUID kMediaSubTypeY16;
+const GUID kMediaSubTypeZ16 = {
+    0x2036315a,
+    0x0000,
+    0x0010,
+    {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
+const GUID kMediaSubTypeINVZ = {
+    0x5a564e49,
+    0x2d90,
+    0x4a58,
+    {0x92, 0x0b, 0x77, 0x3f, 0x1f, 0x2c, 0x55, 0x6b}};
+const GUID kMediaSubTypeY16 = {
+    0x20363159,
+    0x0000,
+    0x0010,
+    {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
 class SinkInputPin;
...
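As an aside, not part of the patch: the Y16 and Z16 GUIDs above follow the standard Media Foundation convention of placing the format's FOURCC in the first field on top of the base GUID layout xxxxxxxx-0000-0010-8000-00AA00389B71, which can be checked with the usual MAKEFOURCC macro from <windows.h>. INVZ, by contrast, is a vendor-specific GUID that does not follow this pattern.

// Illustration only, assuming the standard MAKEFOURCC definition.
static_assert(MAKEFOURCC('Y', '1', '6', ' ') == 0x20363159,
              "kMediaSubTypeY16.Data1 is the FOURCC 'Y16 '");
static_assert(MAKEFOURCC('Z', '1', '6', ' ') == 0x2036315a,
              "kMediaSubTypeZ16.Data1 is the FOURCC 'Z16 '");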
@@ -320,8 +320,8 @@ static void GetDeviceSupportedFormatsMediaFoundation(
       DLOG(ERROR) << "GetGUID failed: " << logging::SystemErrorCodeToString(hr);
       return;
     }
-    VideoCaptureDeviceMFWin::FormatFromGuid(type_guid,
-                                            &capture_format.pixel_format);
+    VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+        type_guid, &capture_format.pixel_format);
     type.Reset();
     ++stream_index;
     if (capture_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
...
@@ -108,11 +108,6 @@ scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMFPhotoCallback(
 }
 
 }  // namespace
 
-// Since IMFCaptureEngine performs the video decoding itself, and Chromium uses
-// I420 at the other end of the pipe, video output format is forced to I420.
-static const GUID kSinkMFVideoFormat = MFVideoFormat_I420;
-static const VideoPixelFormat kSinkVideoPixelFormat = PIXEL_FORMAT_I420;
-
 void LogError(const Location& from_here, HRESULT hr) {
   DPLOG(ERROR) << from_here.ToString()
                << " hr = " << logging::SystemErrorCodeToString(hr);
@@ -138,21 +133,22 @@ static bool GetFrameRateFromMediaType(IMFMediaType* type, float* frame_rate) {
   return true;
 }
 
-static bool GetFormatFromMediaType(IMFMediaType* type,
-                                   bool photo,
-                                   VideoCaptureFormat* format) {
+static bool GetFormatFromSourceMediaType(IMFMediaType* source_media_type,
+                                         bool photo,
+                                         VideoCaptureFormat* format) {
   GUID major_type_guid;
-  if (FAILED(type->GetGUID(MF_MT_MAJOR_TYPE, &major_type_guid)) ||
+  if (FAILED(source_media_type->GetGUID(MF_MT_MAJOR_TYPE, &major_type_guid)) ||
       (major_type_guid != MFMediaType_Image &&
-       (photo || !GetFrameRateFromMediaType(type, &format->frame_rate)))) {
+       (photo ||
+        !GetFrameRateFromMediaType(source_media_type, &format->frame_rate)))) {
     return false;
   }
 
   GUID sub_type_guid;
-  if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &sub_type_guid)) ||
-      !GetFrameSizeFromMediaType(type, &format->frame_size) ||
-      !VideoCaptureDeviceMFWin::FormatFromGuid(sub_type_guid,
-                                               &format->pixel_format)) {
+  if (FAILED(source_media_type->GetGUID(MF_MT_SUBTYPE, &sub_type_guid)) ||
+      !GetFrameSizeFromMediaType(source_media_type, &format->frame_size) ||
+      !VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+          sub_type_guid, &format->pixel_format)) {
     return false;
   }
@@ -173,6 +169,69 @@ static HRESULT CopyAttribute(IMFAttributes* source_attributes,
   return hr;
 }
 
+struct MediaFormatConfiguration {
+  GUID mf_source_media_subtype;
+  GUID mf_sink_media_subtype;
+  VideoPixelFormat pixel_format;
+};
+
+static bool GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+    const GUID& mf_source_media_subtype,
+    MediaFormatConfiguration* media_format_configuration) {
+  static const MediaFormatConfiguration kMediaFormatConfigurationMap[] = {
+      // IMFCaptureEngine inevitably performs the video frame decoding itself.
+      // This means that the sink must always be set to an uncompressed video
+      // format.
+      // Since chromium uses I420 at the other end of the pipe, MF known video
+      // output formats are always set to I420.
+      {MFVideoFormat_I420, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_YUY2, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_UYVY, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_RGB24, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_RGB32, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_ARGB32, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_MJPG, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_NV12, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+      {MFVideoFormat_YV12, MFVideoFormat_I420, PIXEL_FORMAT_I420},
+
+      // Depth cameras use specific uncompressed video formats unknown to
+      // IMFCaptureEngine.
+      // Therefore, IMFCaptureEngine cannot perform any transcoding on these.
+      // So we ask IMFCaptureEngine to let the frame pass through, without
+      // transcoding.
+      {kMediaSubTypeY16, kMediaSubTypeY16, PIXEL_FORMAT_Y16},
+      {kMediaSubTypeZ16, kMediaSubTypeZ16, PIXEL_FORMAT_Y16},
+      {kMediaSubTypeINVZ, kMediaSubTypeINVZ, PIXEL_FORMAT_Y16},
+
+      // Photo type
+      {GUID_ContainerFormatJpeg, GUID_ContainerFormatJpeg, PIXEL_FORMAT_MJPEG}};
+
+  for (const auto& kMediaFormatConfiguration : kMediaFormatConfigurationMap) {
+    if (kMediaFormatConfiguration.mf_source_media_subtype ==
+        mf_source_media_subtype) {
+      *media_format_configuration = kMediaFormatConfiguration;
+      return true;
+    }
+  }
+
+  return false;
+}
+
+static HRESULT GetMFSinkMediaSubtype(IMFMediaType* source_media_type,
+                                     GUID* mf_sink_media_subtype) {
+  GUID source_subtype;
+  HRESULT hr = source_media_type->GetGUID(MF_MT_SUBTYPE, &source_subtype);
+  if (FAILED(hr))
+    return hr;
+
+  MediaFormatConfiguration media_format_configuration;
+  if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+          source_subtype, &media_format_configuration))
+    return E_FAIL;
+
+  *mf_sink_media_subtype = media_format_configuration.mf_sink_media_subtype;
+  return S_OK;
+}
+
 static HRESULT ConvertToPhotoSinkMediaType(
     IMFMediaType* source_media_type,
     IMFMediaType* destination_media_type) {
@@ -181,7 +240,12 @@ static HRESULT ConvertToPhotoSinkMediaType(
   if (FAILED(hr))
     return hr;
 
-  hr = destination_media_type->SetGUID(MF_MT_SUBTYPE, GUID_ContainerFormatJpeg);
+  GUID mf_sink_media_subtype;
+  hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype);
+  if (FAILED(hr))
+    return hr;
+
+  hr = destination_media_type->SetGUID(MF_MT_SUBTYPE, mf_sink_media_subtype);
   if (FAILED(hr))
     return hr;
@@ -195,7 +259,12 @@ static HRESULT ConvertToVideoSinkMediaType(IMFMediaType* source_media_type,
   if (FAILED(hr))
     return hr;
 
-  hr = sink_media_type->SetGUID(MF_MT_SUBTYPE, kSinkMFVideoFormat);
+  GUID mf_sink_media_subtype;
+  hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype);
+  if (FAILED(hr))
+    return hr;
+
+  hr = sink_media_type->SetGUID(MF_MT_SUBTYPE, mf_sink_media_subtype);
   if (FAILED(hr))
     return hr;
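For illustration, this is how the remapping is expected to behave after the change (hypothetical usage, not part of the patch; it assumes <mfapi.h>, <wrl/client.h> and the helpers and GUID constants introduced above): a conventional YUY2 webcam source still ends up with an I420 sink subtype, while a Y16 depth source keeps Y16 on the preview sink.

// Hypothetical check, for illustration only.
Microsoft::WRL::ComPtr<IMFMediaType> source_type;
if (SUCCEEDED(MFCreateMediaType(&source_type))) {
  source_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  source_type->SetGUID(MF_MT_SUBTYPE, kMediaSubTypeY16);

  GUID sink_subtype;
  if (SUCCEEDED(GetMFSinkMediaSubtype(source_type.Get(), &sink_subtype)))
    DCHECK(sink_subtype == kMediaSubTypeY16);  // Depth format is passed through.
}
// With MFVideoFormat_YUY2 as the source subtype, sink_subtype would instead be
// MFVideoFormat_I420: regular camera formats keep being transcoded by
// IMFCaptureEngine.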
@@ -332,36 +401,16 @@ class MFVideoCallback final
 };
 
 // static
-bool VideoCaptureDeviceMFWin::FormatFromGuid(const GUID& guid,
-                                             VideoPixelFormat* format) {
-  struct {
-    const GUID& guid;
-    const VideoPixelFormat format;
-  } static const kFormatMap[] = {
-      {MFVideoFormat_I420, kSinkVideoPixelFormat},
-      {MFVideoFormat_YUY2, kSinkVideoPixelFormat},
-      {MFVideoFormat_UYVY, kSinkVideoPixelFormat},
-      {MFVideoFormat_RGB24, kSinkVideoPixelFormat},
-      {MFVideoFormat_RGB32, kSinkVideoPixelFormat},
-      {MFVideoFormat_ARGB32, kSinkVideoPixelFormat},
-      {MFVideoFormat_MJPG, kSinkVideoPixelFormat},
-      {MFVideoFormat_NV12, kSinkVideoPixelFormat},
-      {MFVideoFormat_YV12, kSinkVideoPixelFormat},
-      {kMediaSubTypeY16, kSinkVideoPixelFormat},
-      {kMediaSubTypeZ16, kSinkVideoPixelFormat},
-      {kMediaSubTypeINVZ, kSinkVideoPixelFormat},
-      {GUID_ContainerFormatJpeg, PIXEL_FORMAT_MJPEG}};
-
-  for (const auto& kFormat : kFormatMap) {
-    if (kFormat.guid == guid) {
-      *format = kFormat.format;
-      return true;
-    }
-  }
-  return false;
+bool VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
+    const GUID& mf_source_media_subtype,
+    VideoPixelFormat* pixel_format) {
+  MediaFormatConfiguration media_format_configuration;
+  if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
+          mf_source_media_subtype, &media_format_configuration))
+    return false;
+
+  *pixel_format = media_format_configuration.pixel_format;
+  return true;
 }
 
 HRESULT VideoCaptureDeviceMFWin::ExecuteHresultCallbackWithRetries(
@@ -456,7 +505,7 @@ HRESULT VideoCaptureDeviceMFWin::FillCapabilities(
              media_type_index,
              type.GetAddressOf()))) {
     VideoCaptureFormat format;
-    if (GetFormatFromMediaType(type.Get(), photo, &format))
+    if (GetFormatFromSourceMediaType(type.Get(), photo, &format))
       capabilities->emplace_back(media_type_index, format, stream_index);
     type.Reset();
     ++media_type_index;
@@ -710,8 +759,9 @@ void VideoCaptureDeviceMFWin::TakePhoto(TakePhotoCallback callback) {
   }
 
   VideoCaptureFormat format;
-  hr = GetFormatFromMediaType(sink_media_type.Get(), true, &format) ? S_OK
-                                                                    : E_FAIL;
+  hr = GetFormatFromSourceMediaType(sink_media_type.Get(), true, &format)
+           ? S_OK
+           : E_FAIL;
   if (FAILED(hr)) {
     LogError(FROM_HERE, hr);
     return;
...
@@ -36,7 +36,8 @@ class MFVideoCallback;
 class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
  public:
-  static bool FormatFromGuid(const GUID& guid, VideoPixelFormat* format);
+  static bool GetPixelFormatFromMFSourceMediaSubtype(const GUID& guid,
+                                                     VideoPixelFormat* format);
 
   explicit VideoCaptureDeviceMFWin(
       Microsoft::WRL::ComPtr<IMFMediaSource> source);
...