Commit f9eaa531 authored by Réda Housni Alaoui, committed by Commit Bot

Win video capture: use IMFCaptureEngine for Media Foundation

- Full rewrite of the video part of the MediaFoundation implementation to
use IMFCaptureEngine
- Implementation of takePhoto, setPhotoOptions and getPhotoCapabilities
- Thanks to IMFCaptureEngine, takePhoto triggers a still image capture at
the highest available resolution without stopping the video stream (a
short sketch of the engine bring-up follows below)
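
For context, a minimal sketch of the IMFCaptureEngine bring-up the new
implementation is built around. It is illustrative only: the
StartEngineAndTakePhoto helper is hypothetical (not part of this patch), and
the preview/photo sink and media type configuration done by the real code is
elided.

// Minimal sketch, not part of the patch: create the capture engine,
// initialize it with a device source and an event callback, start the
// preview, and request a still image. The dedicated photo sink is what
// lets TakePhoto run without stopping the video stream.
#include <mfcaptureengine.h>
#include <objbase.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

bool StartEngineAndTakePhoto(IMFMediaSource* video_source,
                             IMFCaptureEngineOnEventCallback* event_callback) {
  ComPtr<IMFCaptureEngineClassFactory> factory;
  if (FAILED(CoCreateInstance(CLSID_MFCaptureEngineClassFactory, nullptr,
                              CLSCTX_INPROC_SERVER,
                              IID_PPV_ARGS(factory.GetAddressOf()))))
    return false;

  ComPtr<IMFCaptureEngine> engine;
  if (FAILED(factory->CreateInstance(CLSID_MFCaptureEngine,
                                     IID_PPV_ARGS(engine.GetAddressOf()))))
    return false;

  // Initialize(event callback, attributes, audio source, video source).
  if (FAILED(engine->Initialize(event_callback, nullptr, nullptr,
                                video_source)))
    return false;

  // In the real implementation the preview and photo sinks are configured
  // (AddStream + SetSampleCallback) before these calls are made.
  if (FAILED(engine->StartPreview()))
    return false;
  return SUCCEEDED(engine->TakePhoto());
}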

TEST=adapted video_capture_device_unittest.cc and
webrtc_image_capture_browsertest.cc; launch Chrome with
--force-mediafoundation on Win8+ and capture video using
e.g. https://webrtc.github.io/samples/src/content/getusermedia/gum/

R=mcasas@chromium.org

Bug: 730068
Change-Id: Ib8e7f475d8120a63dd08c7b215c1eaf2c6f3d800
Reviewed-on: https://chromium-review.googlesource.com/734042
Commit-Queue: Christian Fremerey <chfremer@chromium.org>
Reviewed-by: Miguel Casas <mcasas@chromium.org>
Reviewed-by: Christian Fremerey <chfremer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#521435}
parent e7e22942
......@@ -667,6 +667,7 @@ Raveendra Karu <r.karu@samsung.com>
Ravi Nanjundappa <nravi.n@samsung.com>
Ravi Phaneendra Kasibhatla <r.kasibhatla@samsung.com>
Ravi Phaneendra Kasibhatla <ravi.kasibhatla@motorola.com>
Réda Housni Alaoui <alaoui.rda@gmail.com>
Refael Ackermann <refack@gmail.com>
Renata Hodovan <rhodovan.u-szeged@partner.samsung.com>
Rene Bolldorf <rb@radix.io>
......@@ -913,6 +914,7 @@ BlackBerry Limited <*@blackberry.com>
Canonical Limited <*@canonical.com>
Code Aurora Forum <*@codeaurora.org>
Comodo CA Limited
Cosium <*@cosium.com>
Endless Mobile, Inc. <*@endlessm.com>
Facebook, Inc. <*@fb.com>
Facebook, Inc. <*@oculus.com>
......
......@@ -46,13 +46,26 @@ namespace {
static const char kImageCaptureHtmlFile[] = "/media/image_capture_test.html";
enum class Camera {
FAKE,
DEFAULT,
#if defined(OS_WIN)
// Media Foundation is only available in Windows versions >= 7; below that,
// the following flag has no effect
WIN_MEDIA_FOUNDATION
#endif
};
// TODO(mcasas): enable real-camera tests by disabling the Fake Device for
// platforms where the ImageCapture code has landed, https://crbug.com/656810
static struct TargetCamera {
bool use_fake;
} const kTargetCameras[] = {{true},
Camera camera;
} const kTargetCameras[] = {{Camera::FAKE},
#if defined(OS_LINUX) || defined(OS_MACOSX) || defined(OS_ANDROID)
{false}
{Camera::DEFAULT},
#endif
#if defined(OS_WIN)
{Camera::WIN_MEDIA_FOUNDATION}
#endif
};
......@@ -145,11 +158,23 @@ class WebRtcImageCaptureSucceedsBrowserTest
void SetUpCommandLine(base::CommandLine* command_line) override {
WebRtcImageCaptureBrowserTestBase::SetUpCommandLine(command_line);
if (std::get<0>(GetParam()).use_fake) {
base::CommandLine::ForCurrentProcess()->AppendSwitch(
switches::kUseFakeDeviceForMediaStream);
ASSERT_TRUE(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream));
switch (std::get<0>(GetParam()).camera) {
case Camera::FAKE:
base::CommandLine::ForCurrentProcess()->AppendSwitch(
switches::kUseFakeDeviceForMediaStream);
ASSERT_TRUE(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream));
break;
#if defined(OS_WIN)
case Camera::WIN_MEDIA_FOUNDATION:
base::CommandLine::ForCurrentProcess()->AppendSwitch(
switches::kForceMediaFoundationVideoCapture);
ASSERT_TRUE(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kForceMediaFoundationVideoCapture));
break;
#endif
default:
break;
}
}
......@@ -157,7 +182,7 @@ class WebRtcImageCaptureSucceedsBrowserTest
// TODO(chfremer): Enable test cases using the video capture service with
// real cameras as soon as root cause for https://crbug.com/733582 is
// understood and resolved.
if ((!std::get<0>(GetParam()).use_fake) &&
if ((std::get<0>(GetParam()).camera != Camera::FAKE) &&
(std::get<1>(GetParam()).use_video_capture_service)) {
LOG(INFO) << "Skipping this test case";
return true;
......
......@@ -6,7 +6,7 @@
<body>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script>
const WIDTH = 320;
const WIDTH = 640;
/** @const */ var CONSTRAINTS = { width: { max : WIDTH } };
// Returns a Promise resolved with |object| after a delay of |delayInMs|.
......
......@@ -28,9 +28,11 @@
#include "testing/gtest/include/gtest/gtest.h"
#if defined(OS_WIN)
#include <mfcaptureengine.h>
#include "base/win/scoped_com_initializer.h"
#include "base/win/windows_version.h" // For fine-grained suppression.
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
#endif
#if defined(OS_MACOSX)
......@@ -88,9 +90,16 @@
#define MAYBE_GetPhotoState DISABLED_GetPhotoState
#endif
// Wrap the TEST_P macro in another one to allow preprocessing of |test_name|
// macros. Needed until https://github.com/google/googletest/issues/389 is
// fixed.
#define WRAPPED_TEST_P(test_case_name, test_name) \
TEST_P(test_case_name, test_name)
using ::testing::_;
using ::testing::Invoke;
using ::testing::SaveArg;
using ::testing::Return;
namespace media {
namespace {
......@@ -116,6 +125,35 @@ static bool IsDeviceUsableForTesting(
};
#endif
enum VideoCaptureImplementationTweak {
NONE,
#if defined(OS_WIN)
WIN_MEDIA_FOUNDATION
#endif
};
#if defined(OS_WIN)
class MockMFPhotoCallback final : public IMFCaptureEngineOnSampleCallback {
public:
~MockMFPhotoCallback() {}
MOCK_METHOD2(DoQueryInterface, HRESULT(REFIID, void**));
MOCK_METHOD0(DoAddRef, ULONG(void));
MOCK_METHOD0(DoRelease, ULONG(void));
MOCK_METHOD1(DoOnSample, HRESULT(IMFSample*));
STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
return DoQueryInterface(riid, object);
}
STDMETHOD_(ULONG, AddRef)() override { return DoAddRef(); }
STDMETHOD_(ULONG, Release)() override { return DoRelease(); }
STDMETHOD(OnSample)(IMFSample* sample) override { return DoOnSample(sample); }
};
#endif
class MockVideoCaptureClient : public VideoCaptureDevice::Client {
public:
MOCK_METHOD0(DoReserveOutputBuffer, void(void));
......@@ -261,7 +299,19 @@ testing::Environment* const mojo_test_env =
} // namespace
class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
class VideoCaptureDeviceTest
: public testing::TestWithParam<
std::tuple<gfx::Size, VideoCaptureImplementationTweak>> {
public:
#if defined(OS_WIN)
scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMockPhotoCallback(
MockMFPhotoCallback* mock_photo_callback,
VideoCaptureDevice::TakePhotoCallback callback,
VideoCaptureFormat format) {
return scoped_refptr<IMFCaptureEngineOnSampleCallback>(mock_photo_callback);
}
#endif
protected:
typedef VideoCaptureDevice::Client Client;
......@@ -294,6 +344,10 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
static_cast<VideoCaptureDeviceFactoryAndroid*>(
video_capture_device_factory_.get())
->ConfigureForTesting();
#elif defined(OS_WIN)
static_cast<VideoCaptureDeviceFactoryWin*>(
video_capture_device_factory_.get())
->set_use_media_foundation_for_testing(UseWinMediaFoundation());
#endif
EXPECT_CALL(*video_capture_client_, DoReserveOutputBuffer()).Times(0);
EXPECT_CALL(*video_capture_client_, DoOnIncomingCapturedBuffer()).Times(0);
......@@ -301,6 +355,12 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
.Times(0);
}
#if defined(OS_WIN)
bool UseWinMediaFoundation() {
return std::get<1>(GetParam()) == WIN_MEDIA_FOUNDATION;
}
#endif
void ResetWithNewClient() {
video_capture_client_.reset(new MockVideoCaptureClient(base::Bind(
&VideoCaptureDeviceTest::OnFrameCaptured, base::Unretained(this))));
......@@ -407,7 +467,7 @@ class VideoCaptureDeviceTest : public testing::TestWithParam<gfx::Size> {
#define MAYBE_OpenInvalidDevice OpenInvalidDevice
#endif
// Tries to allocate an invalid device and verifies it doesn't work.
TEST_F(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
VideoCaptureDeviceDescriptor invalid_descriptor;
invalid_descriptor.device_id = "jibberish";
invalid_descriptor.display_name = "jibberish";
......@@ -439,12 +499,12 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_OpenInvalidDevice) {
}
// Allocates the first enumerated device, and expects a frame.
TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
const gfx::Size& size = GetParam();
const gfx::Size& size = std::get<0>(GetParam());
if (!IsCaptureSizeSupported(*descriptor, size))
return;
const int width = size.width();
......@@ -474,14 +534,22 @@ TEST_P(VideoCaptureDeviceTest, CaptureWithSize) {
}
const gfx::Size kCaptureSizes[] = {gfx::Size(640, 480), gfx::Size(1280, 720)};
const VideoCaptureImplementationTweak kCaptureImplementationTweaks[] = {
NONE,
#if defined(OS_WIN)
WIN_MEDIA_FOUNDATION
#endif
};
INSTANTIATE_TEST_CASE_P(VideoCaptureDeviceTests,
VideoCaptureDeviceTest,
testing::ValuesIn(kCaptureSizes));
INSTANTIATE_TEST_CASE_P(
VideoCaptureDeviceTests,
VideoCaptureDeviceTest,
testing::Combine(testing::ValuesIn(kCaptureSizes),
testing::ValuesIn(kCaptureImplementationTweaks)));
// Allocates a device with an uncommon resolution and verifies frames are
// captured in a close, much more typical one.
TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
......@@ -508,7 +576,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_AllocateBadSize) {
}
// Cause hangs on Windows, Linux. Fails Android. https://crbug.com/417824
TEST_F(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
......@@ -552,7 +620,7 @@ TEST_F(VideoCaptureDeviceTest, DISABLED_ReAllocateCamera) {
}
// Starts the camera in 720p to try and capture MJPEG format.
TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
std::unique_ptr<VideoCaptureDeviceDescriptor> device_descriptor =
GetFirstDeviceDescriptorSupportingPixelFormat(PIXEL_FORMAT_MJPEG);
if (!device_descriptor) {
......@@ -589,7 +657,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_CaptureMjpeg) {
device->StopAndDeAllocate();
}
TEST_F(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
// Use PIXEL_FORMAT_MAX to iterate all device names for testing
// GetDeviceSupportedFormats().
std::unique_ptr<VideoCaptureDeviceDescriptor> device_descriptor =
......@@ -601,7 +669,7 @@ TEST_F(VideoCaptureDeviceTest, NoCameraSupportsPixelFormatMax) {
// Starts the camera and verifies that a photo can be taken. The correctness of
// the photo is enforced by MockImageCaptureClient.
TEST_F(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
......@@ -650,7 +718,7 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
}
// Starts the camera and verifies that the photo capabilities can be retrieved.
TEST_F(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
const auto descriptor = FindUsableDeviceDescriptor();
if (!descriptor)
return;
......@@ -701,4 +769,55 @@ TEST_F(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
device->StopAndDeAllocate();
}
#if defined(OS_WIN)
// Verifies that the photo callback is correctly released by MediaFoundation.
WRAPPED_TEST_P(VideoCaptureDeviceTest, CheckPhotoCallbackRelease) {
if (!UseWinMediaFoundation())
return;
std::unique_ptr<VideoCaptureDeviceDescriptor> descriptor =
GetFirstDeviceDescriptorSupportingPixelFormat(PIXEL_FORMAT_MJPEG);
if (!descriptor) {
DVLOG(1) << "No usable media foundation device descriptor. Exiting test.";
return;
}
MockMFPhotoCallback* callback = new MockMFPhotoCallback();
EXPECT_CALL(*callback, DoQueryInterface(_, _)).WillRepeatedly(Return(S_OK));
EXPECT_CALL(*callback, DoAddRef()).WillOnce(Return(1U));
EXPECT_CALL(*callback, DoRelease()).WillOnce(Return(1U));
EXPECT_CALL(*callback, DoOnSample(_)).WillOnce(Return(S_OK));
EXPECT_CALL(*video_capture_client_, OnError(_, _)).Times(0);
EXPECT_CALL(*video_capture_client_, OnStarted());
std::unique_ptr<VideoCaptureDevice> device(
video_capture_device_factory_->CreateDevice(*descriptor));
ASSERT_TRUE(device);
static_cast<VideoCaptureDeviceMFWin*>(device.get())
->set_create_mf_photo_callback_for_testing(
base::Bind(&VideoCaptureDeviceTest::CreateMockPhotoCallback,
base::Unretained(this), callback));
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size.SetSize(320, 240);
capture_params.requested_format.frame_rate = 30;
capture_params.requested_format.pixel_format = PIXEL_FORMAT_MJPEG;
device->AllocateAndStart(capture_params, std::move(video_capture_client_));
VideoCaptureDevice::TakePhotoCallback scoped_callback = base::BindOnce(
&MockImageCaptureClient::DoOnPhotoTaken, image_capture_client_);
base::RunLoop run_loop;
base::Closure quit_closure = BindToCurrentLoop(run_loop.QuitClosure());
EXPECT_CALL(*image_capture_client_.get(), OnCorrectPhotoTaken())
.WillOnce(RunClosure(quit_closure));
device->TakePhoto(std::move(scoped_callback));
run_loop.Run();
device->StopAndDeAllocate();
}
#endif
}; // namespace media
......@@ -69,10 +69,9 @@ static bool IsDeviceBlacklistedForQueryingDetailedFrameRates(
static bool LoadMediaFoundationDlls() {
static const wchar_t* const kMfDLLs[] = {
L"%WINDIR%\\system32\\mf.dll",
L"%WINDIR%\\system32\\mfplat.dll",
L"%WINDIR%\\system32\\mf.dll", L"%WINDIR%\\system32\\mfplat.dll",
L"%WINDIR%\\system32\\mfreadwrite.dll",
};
L"%WINDIR%\\system32\\MFCaptureEngine.dll"};
for (const wchar_t* kMfDLL : kMfDLLs) {
wchar_t path[MAX_PATH] = {0};
......@@ -86,8 +85,13 @@ static bool LoadMediaFoundationDlls() {
static bool PrepareVideoCaptureAttributesMediaFoundation(
IMFAttributes** attributes,
int count) {
if (!InitializeMediaFoundation())
// Once https://bugs.chromium.org/p/chromium/issues/detail?id=791615 is fixed,
// we must make sure that this method succeeds in the capture_unittests
// context when MediaFoundation is enabled.
if (!VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() ||
!InitializeMediaFoundation()) {
return false;
}
if (FAILED(MFCreateAttributes(attributes, count)))
return false;
......@@ -286,8 +290,9 @@ static void GetDeviceSupportedFormatsMediaFoundation(
DWORD stream_index = 0;
ComPtr<IMFMediaType> type;
while (SUCCEEDED(reader->GetNativeMediaType(kFirstVideoStream, stream_index,
type.GetAddressOf()))) {
while (SUCCEEDED(hr = reader->GetNativeMediaType(
static_cast<DWORD>(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
stream_index, type.GetAddressOf()))) {
UINT32 width, height;
hr = MFGetAttributeSize(type.Get(), MF_MT_FRAME_SIZE, &width, &height);
if (FAILED(hr)) {
......
......@@ -30,10 +30,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
const VideoCaptureDeviceDescriptor& device_descriptor,
VideoCaptureFormats* supported_formats) override;
void set_use_media_foundation_for_testing(bool use) {
use_media_foundation_ = use;
}
private:
// Media Foundation is available in Win7 and later, use it if explicitly
// forced via flag, else use DirectShow.
const bool use_media_foundation_;
bool use_media_foundation_;
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
};
......
......@@ -7,7 +7,9 @@
#include <mfapi.h>
#include <mferror.h>
#include <stddef.h>
#include <wincodec.h>
#include <thread>
#include <utility>
#include "base/location.h"
......@@ -17,14 +19,102 @@
#include "base/synchronization/waitable_event.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/windows_version.h"
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/win/capability_list_win.h"
#include "media/capture/video/win/sink_filter_win.h"
using base::win::ScopedCoMem;
using Microsoft::WRL::ComPtr;
using base::Location;
namespace media {
static bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
namespace {
class MFPhotoCallback final
: public base::RefCountedThreadSafe<MFPhotoCallback>,
public IMFCaptureEngineOnSampleCallback {
public:
MFPhotoCallback(VideoCaptureDevice::TakePhotoCallback callback,
VideoCaptureFormat format)
: callback_(std::move(callback)), format_(format) {}
STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
if (riid == IID_IUnknown || riid == IID_IMFCaptureEngineOnSampleCallback) {
AddRef();
*object = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
return S_OK;
}
return E_NOINTERFACE;
}
STDMETHOD_(ULONG, AddRef)() override {
base::RefCountedThreadSafe<MFPhotoCallback>::AddRef();
return 1U;
}
STDMETHOD_(ULONG, Release)() override {
base::RefCountedThreadSafe<MFPhotoCallback>::Release();
return 1U;
}
STDMETHOD(OnSample)(IMFSample* sample) override {
if (!sample)
return S_OK;
DWORD buffer_count = 0;
sample->GetBufferCount(&buffer_count);
for (DWORD i = 0; i < buffer_count; ++i) {
ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, buffer.GetAddressOf());
if (!buffer)
continue;
BYTE* data = nullptr;
DWORD max_length = 0;
DWORD length = 0;
buffer->Lock(&data, &max_length, &length);
mojom::BlobPtr blob = Blobify(data, length, format_);
buffer->Unlock();
if (blob) {
std::move(callback_).Run(std::move(blob));
// What is it supposed to mean if there is more than one buffer sent to
// us as a response to requesting a single still image? Are we supposed
// to somehow concatenate the buffers? Or is it safe to ignore extra
// buffers? For now, we ignore extra buffers.
break;
}
}
return S_OK;
}
private:
friend class base::RefCountedThreadSafe<MFPhotoCallback>;
~MFPhotoCallback() = default;
VideoCaptureDevice::TakePhotoCallback callback_;
const VideoCaptureFormat format_;
DISALLOW_COPY_AND_ASSIGN(MFPhotoCallback);
};
scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMFPhotoCallback(
VideoCaptureDevice::TakePhotoCallback callback,
VideoCaptureFormat format) {
return scoped_refptr<IMFCaptureEngineOnSampleCallback>(
new MFPhotoCallback(std::move(callback), format));
}
} // namespace
void LogError(const Location& from_here, HRESULT hr) {
#if !defined(NDEBUG)
DPLOG(ERROR) << from_here.ToString()
<< " hr = " << logging::SystemErrorCodeToString(hr);
#endif
}
static bool GetFrameSizeFromMediaType(IMFMediaType* type,
gfx::Size* frame_size) {
UINT32 width32, height32;
if (FAILED(MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width32, &height32)))
return false;
......@@ -32,7 +122,7 @@ static bool GetFrameSize(IMFMediaType* type, gfx::Size* frame_size) {
return true;
}
static bool GetFrameRate(IMFMediaType* type, float* frame_rate) {
static bool GetFrameRateFromMediaType(IMFMediaType* type, float* frame_rate) {
UINT32 numerator, denominator;
if (FAILED(MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator,
&denominator)) ||
......@@ -43,12 +133,19 @@ static bool GetFrameRate(IMFMediaType* type, float* frame_rate) {
return true;
}
static bool FillFormat(IMFMediaType* type, VideoCaptureFormat* format) {
GUID type_guid;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &type_guid)) ||
!GetFrameSize(type, &format->frame_size) ||
!GetFrameRate(type, &format->frame_rate) ||
!VideoCaptureDeviceMFWin::FormatFromGuid(type_guid,
static bool GetFormatFromMediaType(IMFMediaType* type,
VideoCaptureFormat* format) {
GUID major_type_guid;
if (FAILED(type->GetGUID(MF_MT_MAJOR_TYPE, &major_type_guid)) ||
(major_type_guid != MFMediaType_Image &&
!GetFrameRateFromMediaType(type, &format->frame_rate))) {
return false;
}
GUID sub_type_guid;
if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &sub_type_guid)) ||
!GetFrameSizeFromMediaType(type, &format->frame_size) ||
!VideoCaptureDeviceMFWin::FormatFromGuid(sub_type_guid,
&format->pixel_format)) {
return false;
}
......@@ -56,18 +153,98 @@ static bool FillFormat(IMFMediaType* type, VideoCaptureFormat* format) {
return true;
}
HRESULT FillCapabilities(IMFSourceReader* source,
static HRESULT CopyAttribute(IMFAttributes* source_attributes,
IMFAttributes* destination_attributes,
const GUID& key) {
PROPVARIANT var;
PropVariantInit(&var);
HRESULT hr = source_attributes->GetItem(key, &var);
if (FAILED(hr))
return hr;
hr = destination_attributes->SetItem(key, var);
PropVariantClear(&var);
return hr;
}
static HRESULT ConvertToPhotoJpegMediaType(
IMFMediaType* source_media_type,
IMFMediaType* destination_media_type) {
HRESULT hr =
destination_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Image);
if (FAILED(hr))
return hr;
hr = destination_media_type->SetGUID(MF_MT_SUBTYPE, GUID_ContainerFormatJpeg);
if (FAILED(hr))
return hr;
return CopyAttribute(source_media_type, destination_media_type,
MF_MT_FRAME_SIZE);
}
static const CapabilityWin& GetBestMatchedPhotoCapability(
ComPtr<IMFMediaType> current_media_type,
gfx::Size requested_size,
const CapabilityList& capabilities) {
gfx::Size current_size;
GetFrameSizeFromMediaType(current_media_type.Get(), &current_size);
int requested_height = requested_size.height() > 0 ? requested_size.height()
: current_size.height();
int requested_width = requested_size.width() > 0 ? requested_size.width()
: current_size.width();
const CapabilityWin* best_match = &(*capabilities.begin());
for (const CapabilityWin& capability : capabilities) {
int height = capability.supported_format.frame_size.height();
int width = capability.supported_format.frame_size.width();
int best_height = best_match->supported_format.frame_size.height();
int best_width = best_match->supported_format.frame_size.width();
if (std::abs(height - requested_height) <= std::abs(height - best_height) &&
std::abs(width - requested_width) <= std::abs(width - best_width)) {
best_match = &capability;
}
}
return *best_match;
}
HRESULT GetAvailableDeviceMediaType(IMFCaptureSource* source,
DWORD stream_index,
DWORD media_type_index,
IMFMediaType** type) {
HRESULT hr;
// Rarely, for some unknown reason, GetAvailableDeviceMediaType returns an
// undocumented MF_E_INVALIDREQUEST. Retrying solves the issue.
int retry_count = 0;
do {
hr = source->GetAvailableDeviceMediaType(stream_index, media_type_index,
type);
if (FAILED(hr))
base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(50));
// Give up after ~10 seconds
} while (hr == MF_E_INVALIDREQUEST && retry_count++ < 200);
return hr;
}
HRESULT FillCapabilities(DWORD stream,
IMFCaptureSource* source,
CapabilityList* capabilities) {
DWORD stream_index = 0;
Microsoft::WRL::ComPtr<IMFMediaType> type;
DWORD media_type_index = 0;
ComPtr<IMFMediaType> type;
HRESULT hr;
while (SUCCEEDED(hr = source->GetNativeMediaType(
kFirstVideoStream, stream_index, type.GetAddressOf()))) {
while (
SUCCEEDED(hr = GetAvailableDeviceMediaType(
source, stream, media_type_index, type.GetAddressOf()))) {
VideoCaptureFormat format;
if (FillFormat(type.Get(), &format))
capabilities->emplace_back(stream_index, format);
if (GetFormatFromMediaType(type.Get(), &format))
capabilities->emplace_back(media_type_index, format);
type.Reset();
++stream_index;
++media_type_index;
}
if (capabilities->empty() && (SUCCEEDED(hr) || hr == MF_E_NO_MORE_TYPES))
......@@ -76,40 +253,51 @@ HRESULT FillCapabilities(IMFSourceReader* source,
return (hr == MF_E_NO_MORE_TYPES) ? S_OK : hr;
}
class MFReaderCallback final
: public base::RefCountedThreadSafe<MFReaderCallback>,
public IMFSourceReaderCallback {
class MFVideoCallback final
: public base::RefCountedThreadSafe<MFVideoCallback>,
public IMFCaptureEngineOnSampleCallback,
public IMFCaptureEngineOnEventCallback {
public:
MFReaderCallback(VideoCaptureDeviceMFWin* observer)
: observer_(observer), wait_event_(NULL) {}
void SetSignalOnFlush(base::WaitableEvent* event) { wait_event_ = event; }
MFVideoCallback(VideoCaptureDeviceMFWin* observer) : observer_(observer) {}
STDMETHOD(QueryInterface)(REFIID riid, void** object) override {
if (riid != IID_IUnknown && riid != IID_IMFSourceReaderCallback)
return E_NOINTERFACE;
*object = static_cast<IMFSourceReaderCallback*>(this);
AddRef();
return S_OK;
HRESULT hr = E_NOINTERFACE;
if (riid == IID_IUnknown) {
*object = this;
hr = S_OK;
} else if (riid == IID_IMFCaptureEngineOnSampleCallback) {
*object = static_cast<IMFCaptureEngineOnSampleCallback*>(this);
hr = S_OK;
} else if (riid == IID_IMFCaptureEngineOnEventCallback) {
*object = static_cast<IMFCaptureEngineOnEventCallback*>(this);
hr = S_OK;
}
if (SUCCEEDED(hr))
AddRef();
return hr;
}
STDMETHOD_(ULONG, AddRef)() override {
base::RefCountedThreadSafe<MFReaderCallback>::AddRef();
base::RefCountedThreadSafe<MFVideoCallback>::AddRef();
return 1U;
}
STDMETHOD_(ULONG, Release)() override {
base::RefCountedThreadSafe<MFReaderCallback>::Release();
base::RefCountedThreadSafe<MFVideoCallback>::Release();
return 1U;
}
STDMETHOD(OnReadSample)
(HRESULT status,
DWORD stream_index,
DWORD stream_flags,
LONGLONG raw_time_stamp,
IMFSample* sample) override {
STDMETHOD(OnEvent)(IMFMediaEvent* media_event) override {
observer_->OnEvent(media_event);
return S_OK;
}
STDMETHOD(OnSample)(IMFSample* sample) override {
base::TimeTicks reference_time(base::TimeTicks::Now());
LONGLONG raw_time_stamp = 0;
sample->GetSampleTime(&raw_time_stamp);
base::TimeDelta timestamp =
base::TimeDelta::FromMicroseconds(raw_time_stamp / 10);
if (!sample) {
......@@ -121,9 +309,9 @@ class MFReaderCallback final
sample->GetBufferCount(&count);
for (DWORD i = 0; i < count; ++i) {
Microsoft::WRL::ComPtr<IMFMediaBuffer> buffer;
ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, buffer.GetAddressOf());
if (buffer.Get()) {
if (buffer) {
DWORD length = 0, max_length = 0;
BYTE* data = NULL;
buffer->Lock(&data, &max_length, &length);
......@@ -135,25 +323,10 @@ class MFReaderCallback final
return S_OK;
}
STDMETHOD(OnFlush)(DWORD stream_index) override {
if (wait_event_) {
wait_event_->Signal();
wait_event_ = NULL;
}
return S_OK;
}
STDMETHOD(OnEvent)(DWORD stream_index, IMFMediaEvent* event) override {
NOTIMPLEMENTED();
return S_OK;
}
private:
friend class base::RefCountedThreadSafe<MFReaderCallback>;
~MFReaderCallback() {}
friend class base::RefCountedThreadSafe<MFVideoCallback>;
~MFVideoCallback() {}
VideoCaptureDeviceMFWin* observer_;
base::WaitableEvent* wait_event_;
};
// static
......@@ -169,6 +342,7 @@ bool VideoCaptureDeviceMFWin::FormatFromGuid(const GUID& guid,
{MFVideoFormat_RGB24, PIXEL_FORMAT_RGB24},
{MFVideoFormat_ARGB32, PIXEL_FORMAT_ARGB},
{MFVideoFormat_MJPG, PIXEL_FORMAT_MJPEG},
{GUID_ContainerFormatJpeg, PIXEL_FORMAT_MJPEG},
{MFVideoFormat_YV12, PIXEL_FORMAT_YV12},
{kMediaSubTypeY16, PIXEL_FORMAT_Y16},
{kMediaSubTypeZ16, PIXEL_FORMAT_Y16},
......@@ -187,7 +361,9 @@ bool VideoCaptureDeviceMFWin::FormatFromGuid(const GUID& guid,
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor)
: descriptor_(device_descriptor), capture_(0) {
: descriptor_(device_descriptor),
create_mf_photo_callback_(base::Bind(&CreateMFPhotoCallback)),
is_started_(false) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
......@@ -195,20 +371,44 @@ VideoCaptureDeviceMFWin::~VideoCaptureDeviceMFWin() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
bool VideoCaptureDeviceMFWin::Init(
const Microsoft::WRL::ComPtr<IMFMediaSource>& source) {
bool VideoCaptureDeviceMFWin::Init(const ComPtr<IMFMediaSource>& source) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!reader_.Get());
DCHECK(!engine_);
Microsoft::WRL::ComPtr<IMFAttributes> attributes;
HRESULT hr = S_OK;
ComPtr<IMFAttributes> attributes;
ComPtr<IMFCaptureEngineClassFactory> capture_engine_class_factory;
MFCreateAttributes(attributes.GetAddressOf(), 1);
DCHECK(attributes.Get());
DCHECK(attributes);
callback_ = new MFReaderCallback(this);
attributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, callback_.get());
hr = CoCreateInstance(
CLSID_MFCaptureEngineClassFactory, NULL, CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(capture_engine_class_factory.GetAddressOf()));
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return false;
}
hr = capture_engine_class_factory->CreateInstance(
CLSID_MFCaptureEngine, IID_PPV_ARGS(engine_.GetAddressOf()));
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return false;
}
video_callback_ = new MFVideoCallback(this);
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return false;
}
return SUCCEEDED(MFCreateSourceReaderFromMediaSource(
source.Get(), attributes.Get(), reader_.GetAddressOf()));
hr = engine_->Initialize(video_callback_.get(), attributes.Get(), nullptr,
source.Get());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return false;
}
return true;
}
void VideoCaptureDeviceMFWin::AllocateAndStart(
......@@ -219,65 +419,314 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
base::AutoLock lock(lock_);
client_ = std::move(client);
DCHECK_EQ(capture_, false);
DCHECK_EQ(false, is_started_);
CapabilityList capabilities;
HRESULT hr = S_OK;
if (reader_.Get()) {
hr = FillCapabilities(reader_.Get(), &capabilities);
if (SUCCEEDED(hr)) {
const CapabilityWin found_capability =
GetBestMatchedCapability(params.requested_format, capabilities);
Microsoft::WRL::ComPtr<IMFMediaType> type;
hr = reader_->GetNativeMediaType(kFirstVideoStream,
found_capability.stream_index,
type.GetAddressOf());
if (SUCCEEDED(hr)) {
hr = reader_->SetCurrentMediaType(kFirstVideoStream, NULL, type.Get());
if (SUCCEEDED(hr)) {
hr =
reader_->ReadSample(kFirstVideoStream, 0, NULL, NULL, NULL, NULL);
if (SUCCEEDED(hr)) {
capture_format_ = found_capability.supported_format;
client_->OnStarted();
capture_ = true;
return;
}
}
}
}
if (!engine_) {
OnError(FROM_HERE, E_FAIL);
return;
}
ComPtr<IMFCaptureSource> source;
HRESULT hr = engine_->GetSource(source.GetAddressOf());
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
hr = FillCapabilities(kPreferredVideoPreviewStream, source.Get(),
&capabilities);
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
const CapabilityWin found_capability =
GetBestMatchedCapability(params.requested_format, capabilities);
ComPtr<IMFMediaType> type;
hr = GetAvailableDeviceMediaType(source.Get(), kPreferredVideoPreviewStream,
found_capability.stream_index,
type.GetAddressOf());
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
hr = source->SetCurrentDeviceMediaType(kPreferredVideoPreviewStream,
type.Get());
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
ComPtr<IMFCaptureSink> sink;
hr = engine_->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PREVIEW,
sink.GetAddressOf());
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
ComPtr<IMFCapturePreviewSink> preview_sink;
hr = sink->QueryInterface(IID_PPV_ARGS(preview_sink.GetAddressOf()));
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
hr = preview_sink->RemoveAllStreams();
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
DWORD dw_sink_stream_index = 0;
hr = preview_sink->AddStream(kPreferredVideoPreviewStream, type.Get(), NULL,
&dw_sink_stream_index);
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
hr = preview_sink->SetSampleCallback(dw_sink_stream_index,
video_callback_.get());
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
OnError(FROM_HERE, hr);
hr = engine_->StartPreview();
if (FAILED(hr)) {
OnError(FROM_HERE, hr);
return;
}
capture_video_format_ = found_capability.supported_format;
client_->OnStarted();
is_started_ = true;
}
void VideoCaptureDeviceMFWin::StopAndDeAllocate() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
base::WaitableEvent flushed(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
const int kFlushTimeOutInMs = 1000;
bool wait = false;
{
base::AutoLock lock(lock_);
if (capture_) {
capture_ = false;
callback_->SetSignalOnFlush(&flushed);
wait = SUCCEEDED(
reader_->Flush(static_cast<DWORD>(MF_SOURCE_READER_ALL_STREAMS)));
if (!wait) {
callback_->SetSignalOnFlush(NULL);
}
base::AutoLock lock(lock_);
if (is_started_ && engine_)
engine_->StopPreview();
is_started_ = false;
client_.reset();
}
void VideoCaptureDeviceMFWin::TakePhoto(TakePhotoCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!is_started_)
return;
ComPtr<IMFCaptureSource> source;
HRESULT hr = engine_->GetSource(source.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFMediaType> current_media_type;
hr = source->GetCurrentDeviceMediaType(kPreferredPhotoStream,
current_media_type.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFMediaType> photo_media_type;
hr = MFCreateMediaType(photo_media_type.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
hr = ConvertToPhotoJpegMediaType(current_media_type.Get(),
photo_media_type.Get());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
hr = source->SetCurrentDeviceMediaType(kPreferredPhotoStream,
photo_media_type.Get());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
VideoCaptureFormat format;
hr = GetFormatFromMediaType(photo_media_type.Get(), &format) ? S_OK : E_FAIL;
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFCaptureSink> sink;
hr = engine_->GetSink(MF_CAPTURE_ENGINE_SINK_TYPE_PHOTO, sink.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFCapturePhotoSink> photo_sink;
hr = sink->QueryInterface(IID_PPV_ARGS(photo_sink.GetAddressOf()));
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
hr = photo_sink->RemoveAllStreams();
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
DWORD dw_sink_stream_index = 0;
hr = photo_sink->AddStream(kPreferredPhotoStream, photo_media_type.Get(),
NULL, &dw_sink_stream_index);
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
scoped_refptr<IMFCaptureEngineOnSampleCallback> photo_callback =
create_mf_photo_callback_.Run(std::move(callback), format);
hr = photo_sink->SetSampleCallback(photo_callback.get());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
hr = engine_->TakePhoto();
if (FAILED(hr))
LogError(FROM_HERE, hr);
}
void VideoCaptureDeviceMFWin::GetPhotoState(GetPhotoStateCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!is_started_)
return;
ComPtr<IMFCaptureSource> source;
HRESULT hr = engine_->GetSource(source.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
CapabilityList capabilities;
hr = FillCapabilities(kPreferredPhotoStream, source.Get(), &capabilities);
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFMediaType> current_media_type;
hr = source->GetCurrentDeviceMediaType(kPreferredPhotoStream,
current_media_type.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
auto photo_capabilities = mojom::PhotoState::New();
gfx::Size current_size;
GetFrameSizeFromMediaType(current_media_type.Get(), &current_size);
gfx::Size min_size = gfx::Size(current_size.width(), current_size.height());
gfx::Size max_size = gfx::Size(current_size.width(), current_size.height());
for (const CapabilityWin& capability : capabilities) {
min_size.SetToMin(capability.supported_format.frame_size);
max_size.SetToMax(capability.supported_format.frame_size);
}
photo_capabilities->height = mojom::Range::New(
max_size.height(), min_size.height(), current_size.height(), 1);
photo_capabilities->width = mojom::Range::New(
max_size.width(), min_size.width(), current_size.width(), 1);
photo_capabilities->exposure_compensation = mojom::Range::New();
photo_capabilities->color_temperature = mojom::Range::New();
photo_capabilities->iso = mojom::Range::New();
photo_capabilities->brightness = mojom::Range::New();
photo_capabilities->contrast = mojom::Range::New();
photo_capabilities->saturation = mojom::Range::New();
photo_capabilities->sharpness = mojom::Range::New();
photo_capabilities->zoom = mojom::Range::New();
photo_capabilities->red_eye_reduction = mojom::RedEyeReduction::NEVER;
photo_capabilities->torch = false;
std::move(callback).Run(std::move(photo_capabilities));
}
void VideoCaptureDeviceMFWin::SetPhotoOptions(
mojom::PhotoSettingsPtr settings,
SetPhotoOptionsCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!is_started_)
return;
HRESULT hr = S_OK;
ComPtr<IMFCaptureSource> source;
hr = engine_->GetSource(source.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
if (settings->has_height || settings->has_width) {
CapabilityList capabilities;
hr = FillCapabilities(kPreferredPhotoStream, source.Get(), &capabilities);
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
ComPtr<IMFMediaType> current_media_type;
hr = source->GetCurrentDeviceMediaType(kPreferredPhotoStream,
current_media_type.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
gfx::Size requested_size = gfx::Size();
if (settings->has_height)
requested_size.set_height(settings->height);
if (settings->has_width)
requested_size.set_width(settings->width);
const CapabilityWin best_match = GetBestMatchedPhotoCapability(
current_media_type, requested_size, capabilities);
ComPtr<IMFMediaType> type;
hr = GetAvailableDeviceMediaType(source.Get(), kPreferredPhotoStream,
best_match.stream_index,
type.GetAddressOf());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
hr = source->SetCurrentDeviceMediaType(kPreferredPhotoStream, type.Get());
if (FAILED(hr)) {
LogError(FROM_HERE, hr);
return;
}
client_.reset();
}
// If the device has been unplugged, the Flush() won't trigger the event
// and a timeout will happen.
// TODO(tommi): Hook up the IMFMediaEventGenerator notifications API and
// do not wait at all after getting MEVideoCaptureDeviceRemoved event.
// See issue/226396.
if (wait)
flushed.TimedWait(base::TimeDelta::FromMilliseconds(kFlushTimeOutInMs));
std::move(callback).Run(true);
}
void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
......@@ -287,27 +736,27 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
base::AutoLock lock(lock_);
if (data && client_.get()) {
client_->OnIncomingCapturedData(data, length, capture_format_, rotation,
reference_time, timestamp);
client_->OnIncomingCapturedData(data, length, capture_video_format_,
rotation, reference_time, timestamp);
}
}
if (capture_) {
HRESULT hr =
reader_->ReadSample(kFirstVideoStream, 0, NULL, NULL, NULL, NULL);
if (FAILED(hr)) {
// If running the *VideoCap* unit tests on repeat, this can sometimes
// fail with HRESULT_FROM_WIN32(ERROR_INVALID_FUNCTION).
// It's not clear to me why this is, but it is possible that it has
// something to do with this bug:
// http://support.microsoft.com/kb/979567
OnError(FROM_HERE, hr);
}
}
void VideoCaptureDeviceMFWin::OnEvent(IMFMediaEvent* media_event) {
base::AutoLock lock(lock_);
GUID event_type;
HRESULT hr = media_event->GetExtendedType(&event_type);
if (SUCCEEDED(hr) && event_type == MF_CAPTURE_ENGINE_ERROR)
media_event->GetStatus(&hr);
if (FAILED(hr))
OnError(FROM_HERE, hr);
}
void VideoCaptureDeviceMFWin::OnError(const base::Location& from_here,
HRESULT hr) {
void VideoCaptureDeviceMFWin::OnError(const Location& from_here, HRESULT hr) {
if (client_.get()) {
client_->OnError(
from_here,
......@@ -316,4 +765,4 @@ void VideoCaptureDeviceMFWin::OnError(const base::Location& from_here,
}
}
} // namespace media
} // namespace media
\ No newline at end of file
......@@ -3,12 +3,13 @@
// found in the LICENSE file.
// Windows specific implementation of VideoCaptureDevice.
// DirectShow is used for capturing. DirectShow provide its own threads
// for capturing.
// MediaFoundation is used for capturing. MediaFoundation provides its own
// threads for capturing.
#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DEVICE_MF_WIN_H_
#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DEVICE_MF_WIN_H_
#include <mfcaptureengine.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <stdint.h>
......@@ -16,9 +17,9 @@
#include <vector>
#include "base/callback_forward.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
#include "base/synchronization/lock.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device.h"
......@@ -30,10 +31,12 @@ class Location;
namespace media {
class MFReaderCallback;
class MFVideoCallback;
const DWORD kFirstVideoStream =
static_cast<DWORD>(MF_SOURCE_READER_FIRST_VIDEO_STREAM);
const DWORD kPreferredVideoPreviewStream = static_cast<DWORD>(
MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_PREVIEW);
const DWORD kPreferredPhotoStream =
static_cast<DWORD>(MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_PHOTO);
class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
public:
......@@ -51,6 +54,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
const VideoCaptureParams& params,
std::unique_ptr<VideoCaptureDevice::Client> client) override;
void StopAndDeAllocate() override;
void TakePhoto(TakePhotoCallback callback) override;
void GetPhotoState(GetPhotoStateCallback callback) override;
void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
SetPhotoOptionsCallback callback) override;
// Captured new video data.
void OnIncomingCapturedData(const uint8_t* data,
......@@ -58,19 +65,33 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
int rotation,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
void OnEvent(IMFMediaEvent* media_event);
using CreateMFPhotoCallbackCB =
base::Callback<scoped_refptr<IMFCaptureEngineOnSampleCallback>(
VideoCaptureDevice::TakePhotoCallback callback,
VideoCaptureFormat format)>;
void set_create_mf_photo_callback_for_testing(CreateMFPhotoCallbackCB cb) {
create_mf_photo_callback_ = cb;
}
private:
void OnError(const base::Location& from_here, HRESULT hr);
VideoCaptureDeviceDescriptor descriptor_;
Microsoft::WRL::ComPtr<IMFActivate> device_;
scoped_refptr<MFReaderCallback> callback_;
CreateMFPhotoCallbackCB create_mf_photo_callback_;
scoped_refptr<MFVideoCallback> video_callback_;
// Guards the below variables from concurrent access between methods running
// on |sequence_checker_| and calls to OnIncomingCapturedData() and OnEvent()
// made by MediaFoundation on threads outside of our control.
base::Lock lock_;
base::Lock lock_; // Used to guard the below variables.
std::unique_ptr<VideoCaptureDevice::Client> client_;
Microsoft::WRL::ComPtr<IMFSourceReader> reader_;
VideoCaptureFormat capture_format_;
bool capture_;
Microsoft::WRL::ComPtr<IMFCaptureEngine> engine_;
VideoCaptureFormat capture_video_format_;
bool is_started_;
SEQUENCE_CHECKER(sequence_checker_);
......