Commit c34e1fc2 authored by Ricky Liang, committed by Commit Bot

Video capture with GpuMemoryBuffer - [Fake|File]VideoCaptureDevice

This CL enables FakeVideoCaptureDevice and FileVideoCaptureDevice
to provide fake camera frames backed by GpuMemoryBuffer.

Bug: 982201, 1006613
Test: capture_unittests
Test: tast.DecodeAccelUsedVP8
Test: Manually with appr.tc and Hangouts Meet
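
As a rough illustration (condensed from the unit test changes in this CL; the
client variable and the 640x480 / 30 fps numbers are arbitrary stand-ins), a
caller opts into the new GpuMemoryBuffer path roughly like this:

  auto device =
      FakeVideoCaptureDeviceFactory::CreateDeviceWithDefaultResolutions(
          PIXEL_FORMAT_I420,
          FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS,
          30.0f, std::make_unique<FakeGpuMemoryBufferSupport>());
  VideoCaptureParams params;
  params.requested_format.frame_size = gfx::Size(640, 480);
  params.requested_format.frame_rate = 30.0f;
  // Request GpuMemoryBuffer-backed capture buffers from the device.
  params.buffer_type = VideoCaptureBufferType::kGpuMemoryBuffer;
  device->AllocateAndStart(params, std::move(client));
  // Delivered frames then report PIXEL_FORMAT_NV12, the only opaque format
  // backing GpuMemoryBuffer frames in this CL.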

Change-Id: I495d1cf11f4f4b9181842c373023b907d7f77f25
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1863055
Commit-Queue: Ricky Liang <jcliang@chromium.org>
Reviewed-by: Christian Fremerey <chfremer@chromium.org>
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Cr-Commit-Position: refs/heads/master@{#706829}
parent 40294377
...@@ -141,11 +141,8 @@ void ServiceVideoCaptureDeviceLauncher::LaunchDeviceAsync(
// GpuMemoryBuffer-based VideoCapture buffer works only on the Chrome OS
// VideoCaptureDevice implementation. It's not supported by
// FakeVideoCaptureDevice.
// TODO: Support GpuMemoryBuffer in FakeVideoCaptureDevice (crbug.com/1006613)
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kVideoCaptureUseGpuMemoryBuffer) &&
switches::kVideoCaptureUseGpuMemoryBuffer)) {
!base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream)) {
new_params.buffer_type = media::VideoCaptureBufferType::kGpuMemoryBuffer;
}
...
...@@ -68,6 +68,8 @@ jumbo_source_set("capture_device_specific") {
"video/file_video_capture_device.h",
"video/file_video_capture_device_factory.cc",
"video/file_video_capture_device_factory.h",
"video/gpu_memory_buffer_utils.cc",
"video/gpu_memory_buffer_utils.h",
"video/video_capture_buffer_handle.cc", "video/video_capture_buffer_handle.cc",
"video/video_capture_buffer_handle.h", "video/video_capture_buffer_handle.h",
"video/video_capture_device.cc", "video/video_capture_device.cc",
...@@ -83,6 +85,7 @@ jumbo_source_set("capture_device_specific") { ...@@ -83,6 +85,7 @@ jumbo_source_set("capture_device_specific") {
"//base", "//base",
"//base:i18n", "//base:i18n",
"//gpu/command_buffer/client", "//gpu/command_buffer/client",
"//gpu/ipc/common:common",
"//media", "//media",
"//media/capture/mojom:image_capture", "//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types", "//media/capture/mojom:image_capture_types",
......
...@@ -18,9 +18,11 @@
#include "base/threading/thread_checker.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/audio/fake_audio_input_stream.h"
#include "media/base/video_frame.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/gpu_memory_buffer_utils.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkFont.h"
...@@ -79,9 +81,10 @@ PixelFormatMatchType DetermineFormatMatchType(
: PixelFormatMatchType::INCOMPATIBLE;
}
const VideoCaptureFormat& FindClosestSupportedFormat(
VideoCaptureFormat FindClosestSupportedFormat(
const VideoCaptureFormat& requested_format,
const VideoCaptureFormats& supported_formats) {
const VideoCaptureFormats& supported_formats,
bool video_capture_use_gmb) {
DCHECK(!supported_formats.empty());
int best_index = 0;
PixelFormatMatchType best_format_match = PixelFormatMatchType::INCOMPATIBLE;
...@@ -115,7 +118,15 @@ const VideoCaptureFormat& FindClosestSupportedFormat(
best_index = i;
}
}
return supported_formats[best_index];
VideoCaptureFormat format = supported_formats[best_index];
// We use NV12 as the underlying opaque pixel format for GpuMemoryBuffer
// frames.
if (video_capture_use_gmb) {
format.pixel_format = PIXEL_FORMAT_NV12;
}
return format;
}
gfx::ColorSpace GetDefaultColorSpace(VideoPixelFormat format) {
...@@ -231,13 +242,37 @@ class JpegEncodingFrameDeliverer : public FrameDeliverer {
std::vector<unsigned char> jpeg_buffer_;
};
// Delivers frames using GpuMemoryBuffer buffers reserved from the client buffer
// pool via OnIncomingCapturedBuffer();
class GpuMemoryBufferFrameDeliverer : public FrameDeliverer {
public:
GpuMemoryBufferFrameDeliverer(
std::unique_ptr<PacmanFramePainter> frame_painter,
gpu::GpuMemoryBufferSupport* gmb_support);
~GpuMemoryBufferFrameDeliverer() override;
// Implementation of FrameDeliveryStrategy
void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
private:
gpu::GpuMemoryBufferSupport* gmb_support_;
};
FrameDelivererFactory::FrameDelivererFactory(
FakeVideoCaptureDevice::DeliveryMode delivery_mode,
const FakeDeviceState* device_state)
: delivery_mode_(delivery_mode), device_state_(device_state) {}
FrameDelivererFactory::FrameDelivererFactory(
FakeVideoCaptureDevice::DeliveryMode delivery_mode,
const FakeDeviceState* device_state,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support)
: delivery_mode_(delivery_mode),
device_state_(device_state),
gmb_support_(gmb_support
? std::move(gmb_support)
: std::make_unique<gpu::GpuMemoryBufferSupport>()) {}
FrameDelivererFactory::~FrameDelivererFactory() = default;
std::unique_ptr<FrameDeliverer> FrameDelivererFactory::CreateFrameDeliverer(
const VideoCaptureFormat& format) {
const VideoCaptureFormat& format,
bool video_capture_use_gmb) {
PacmanFramePainter::Format painter_format;
switch (format.pixel_format) {
case PIXEL_FORMAT_I420:
...@@ -249,6 +284,9 @@ std::unique_ptr<FrameDeliverer> FrameDelivererFactory::CreateFrameDeliverer(
case PIXEL_FORMAT_MJPEG:
painter_format = PacmanFramePainter::Format::SK_N32;
break;
case PIXEL_FORMAT_NV12:
painter_format = PacmanFramePainter::Format::NV12;
break;
default:
NOTREACHED();
painter_format = PacmanFramePainter::Format::I420;
...@@ -266,6 +304,11 @@ std::unique_ptr<FrameDeliverer> FrameDelivererFactory::CreateFrameDeliverer(
delivery_mode =
FakeVideoCaptureDevice::DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS;
}
if (video_capture_use_gmb) {
DLOG(INFO) << "Forcing GpuMemoryBufferFrameDeliverer";
delivery_mode =
FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS;
}
switch (delivery_mode) {
case FakeVideoCaptureDevice::DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS:
...@@ -279,6 +322,9 @@ std::unique_ptr<FrameDeliverer> FrameDelivererFactory::CreateFrameDeliverer(
case FakeVideoCaptureDevice::DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS:
return std::make_unique<ClientBufferFrameDeliverer>(
std::move(frame_painter));
case FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS:
return std::make_unique<GpuMemoryBufferFrameDeliverer>(
std::move(frame_painter), gmb_support_.get());
}
NOTREACHED();
return nullptr;
...@@ -330,6 +376,8 @@ void PacmanFramePainter::DrawGradientSquares(base::TimeDelta elapsed_time,
target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8;
break;
case Format::I420:
case Format::NV12:
// I420 and NV12 have the same Y plane dimension.
target_buffer[offset] = value >> 8;
break;
}
...@@ -346,12 +394,16 @@ void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
SkColorType colorspace = kAlpha_8_SkColorType;
switch (pixel_format_) {
case Format::I420:
case Format::NV12:
// Skia doesn't support painting in I420. Instead, paint an 8bpp
// monochrome image to the beginning of |target_buffer|. This section of
// |target_buffer| corresponds to the Y-plane of the YUV image. Do not
// touch the U or V planes of |target_buffer|. Assuming they have been
// initialized to 0, which corresponds to a green color tone, the result
// will be a green-ish monochrome frame.
//
// NV12 has the same Y plane dimension as I420 and we don't touch UV
// plane.
colorspace = kAlpha_8_SkColorType;
break;
case Format::SK_N32:
...@@ -480,13 +532,15 @@ void FakeVideoCaptureDevice::AllocateAndStart(
std::unique_ptr<VideoCaptureDevice::Client> client) {
DCHECK(thread_checker_.CalledOnValidThread());
const VideoCaptureFormat& selected_format =
FindClosestSupportedFormat(params.requested_format, supported_formats_);
bool video_capture_use_gmb =
(params.buffer_type == VideoCaptureBufferType::kGpuMemoryBuffer);
VideoCaptureFormat selected_format = FindClosestSupportedFormat(
params.requested_format, supported_formats_, video_capture_use_gmb);
beep_time_ = base::TimeDelta();
elapsed_time_ = base::TimeDelta();
frame_deliverer_ =
frame_deliverer_factory_->CreateFrameDeliverer(selected_format);
frame_deliverer_ = frame_deliverer_factory_->CreateFrameDeliverer(
selected_format, video_capture_use_gmb);
device_state_->format.frame_size = selected_format.frame_size;
frame_deliverer_->Initialize(device_state_->format.pixel_format,
std::move(client), device_state_.get());
...@@ -738,6 +792,45 @@ void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame(
CalculateTimeSinceFirstInvocation(now));
}
GpuMemoryBufferFrameDeliverer::GpuMemoryBufferFrameDeliverer(
std::unique_ptr<PacmanFramePainter> frame_painter,
gpu::GpuMemoryBufferSupport* gmb_support)
: FrameDeliverer(std::move(frame_painter)), gmb_support_(gmb_support) {}
GpuMemoryBufferFrameDeliverer::~GpuMemoryBufferFrameDeliverer() = default;
void GpuMemoryBufferFrameDeliverer::PaintAndDeliverNextFrame(
base::TimeDelta timestamp_to_paint) {
if (!client())
return;
std::unique_ptr<gfx::GpuMemoryBuffer> gmb;
VideoCaptureDevice::Client::Buffer capture_buffer;
const gfx::Size& buffer_size = device_state()->format.frame_size;
auto reserve_result = AllocateNV12GpuMemoryBuffer(
client(), buffer_size, gmb_support_, &gmb, &capture_buffer);
if (reserve_result != VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
client()->OnFrameDropped(
ConvertReservationFailureToFrameDropReason(reserve_result));
return;
}
ScopedNV12GpuMemoryBufferMapping scoped_mapping(std::move(gmb));
memset(scoped_mapping.y_plane(), 0,
scoped_mapping.y_stride() * buffer_size.height());
memset(scoped_mapping.uv_plane(), 0,
scoped_mapping.uv_stride() * (buffer_size.height() / 2));
frame_painter()->PaintFrame(timestamp_to_paint, scoped_mapping.y_plane());
base::TimeTicks now = base::TimeTicks::Now();
VideoCaptureFormat modified_format = device_state()->format;
// When GpuMemoryBuffer is used, the frame data is opaque to the CPU for most
// of the time. Currently the only supported underlying format is NV12.
modified_format.pixel_format = PIXEL_FORMAT_NV12;
client()->OnIncomingCapturedBuffer(std::move(capture_buffer), modified_format,
now,
CalculateTimeSinceFirstInvocation(now));
}
void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
base::TimeTicks expected_execution_time) {
DCHECK(thread_checker_.CalledOnValidThread());
...
...@@ -14,6 +14,10 @@
#include "base/threading/thread_checker.h"
#include "media/capture/video/video_capture_device.h"
namespace gpu {
class GpuMemoryBufferSupport;
} // namespace gpu
namespace media {
struct FakeDeviceState;
...@@ -25,7 +29,7 @@ class FrameDelivererFactory;
// as a frame count and timer.
class PacmanFramePainter {
public:
enum class Format { I420, SK_N32, Y16 };
enum class Format { I420, SK_N32, Y16, NV12 };
PacmanFramePainter(Format pixel_format,
const FakeDeviceState* fake_device_state);
...@@ -50,7 +54,8 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
public:
enum class DeliveryMode {
USE_DEVICE_INTERNAL_BUFFERS,
USE_CLIENT_PROVIDED_BUFFERS
USE_CLIENT_PROVIDED_BUFFERS,
USE_GPU_MEMORY_BUFFERS,
};
enum class DisplayMediaType { ANY, MONITOR, WINDOW, BROWSER };
...@@ -134,15 +139,20 @@ struct FakeDeviceState {
// A dependency needed by FakeVideoCaptureDevice.
class FrameDelivererFactory {
public:
FrameDelivererFactory(FakeVideoCaptureDevice::DeliveryMode delivery_mode,
const FakeDeviceState* device_state);
FrameDelivererFactory(
FakeVideoCaptureDevice::DeliveryMode delivery_mode,
const FakeDeviceState* device_state,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support);
~FrameDelivererFactory();
std::unique_ptr<FrameDeliverer> CreateFrameDeliverer(
const VideoCaptureFormat& format);
const VideoCaptureFormat& format,
bool video_capture_use_gmb);
private:
const FakeVideoCaptureDevice::DeliveryMode delivery_mode_;
const FakeDeviceState* device_state_ = nullptr;
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support_;
};
struct FakePhotoDeviceConfig {
...
...@@ -12,6 +12,7 @@
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "build/build_config.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/base/media_switches.h"
namespace {
...@@ -114,7 +115,8 @@ FakeVideoCaptureDeviceFactory::~FakeVideoCaptureDeviceFactory() = default;
// static
std::unique_ptr<VideoCaptureDevice>
FakeVideoCaptureDeviceFactory::CreateDeviceWithSettings(
const FakeVideoCaptureDeviceSettings& settings) {
const FakeVideoCaptureDeviceSettings& settings,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support) {
if (settings.supported_formats.empty())
return CreateErrorDevice();
...@@ -147,8 +149,8 @@ FakeVideoCaptureDeviceFactory::CreateDeviceWithSettings(
return std::make_unique<FakeVideoCaptureDevice>(
settings.supported_formats,
std::make_unique<FrameDelivererFactory>(settings.delivery_mode,
device_state.get()),
std::make_unique<FrameDelivererFactory>(
settings.delivery_mode, device_state.get(), std::move(gmb_support)),
std::move(photo_device), std::move(device_state));
}
...@@ -157,13 +159,14 @@ std::unique_ptr<VideoCaptureDevice>
FakeVideoCaptureDeviceFactory::CreateDeviceWithDefaultResolutions(
VideoPixelFormat pixel_format,
FakeVideoCaptureDevice::DeliveryMode delivery_mode,
float frame_rate) {
float frame_rate,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support) {
FakeVideoCaptureDeviceSettings settings;
settings.delivery_mode = delivery_mode;
for (const gfx::Size& resolution : kDefaultResolutions)
settings.supported_formats.emplace_back(resolution, frame_rate,
pixel_format);
return CreateDeviceWithSettings(settings);
return CreateDeviceWithSettings(settings, std::move(gmb_support));
}
// static
...
...@@ -14,6 +14,10 @@
#include "media/capture/video/fake_video_capture_device.h"
#include "media/capture/video/video_capture_device_factory.h"
namespace gpu {
class GpuMemoryBufferSupport;
} // namespace gpu
namespace media {
struct CAPTURE_EXPORT FakeVideoCaptureDeviceSettings {
...@@ -49,12 +53,14 @@ class CAPTURE_EXPORT FakeVideoCaptureDeviceFactory
~FakeVideoCaptureDeviceFactory() override;
static std::unique_ptr<VideoCaptureDevice> CreateDeviceWithSettings(
const FakeVideoCaptureDeviceSettings& settings);
const FakeVideoCaptureDeviceSettings& settings,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support = nullptr);
static std::unique_ptr<VideoCaptureDevice> CreateDeviceWithDefaultResolutions(
VideoPixelFormat pixel_format,
FakeVideoCaptureDevice::DeliveryMode delivery_mode,
float frame_rate);
float frame_rate,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support = nullptr);
// Creates a device that reports OnError() when AllocateAndStart() is called.
static std::unique_ptr<VideoCaptureDevice> CreateErrorDevice();
...
...@@ -17,91 +17,24 @@
#include "base/test/test_timeouts.h"
#include "base/threading/thread.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/mock_video_capture_device_client.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
#include "media/video/fake_gpu_memory_buffer.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
using ::testing::Bool;
using ::testing::Combine;
using ::testing::Invoke;
using ::testing::SaveArg;
using ::testing::Values;
namespace media {
namespace {
class StubBufferHandle : public VideoCaptureBufferHandle {
public:
StubBufferHandle(size_t mapped_size, uint8_t* data)
: mapped_size_(mapped_size), data_(data) {}
size_t mapped_size() const override { return mapped_size_; }
uint8_t* data() const override { return data_; }
const uint8_t* const_data() const override { return data_; }
private:
const size_t mapped_size_;
uint8_t* const data_;
};
class StubBufferHandleProvider
: public VideoCaptureDevice::Client::Buffer::HandleProvider {
public:
StubBufferHandleProvider(size_t mapped_size, uint8_t* data)
: mapped_size_(mapped_size), data_(data) {}
~StubBufferHandleProvider() override = default;
base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
NOTREACHED();
return {};
}
mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
NOTREACHED();
return mojo::ScopedSharedBufferHandle();
}
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
override {
return std::make_unique<StubBufferHandle>(mapped_size_, data_);
}
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
return gfx::GpuMemoryBufferHandle();
}
private:
const size_t mapped_size_;
uint8_t* const data_;
};
class StubReadWritePermission
: public VideoCaptureDevice::Client::Buffer::ScopedAccessPermission {
public:
StubReadWritePermission(uint8_t* data) : data_(data) {}
~StubReadWritePermission() override { delete[] data_; }
private:
uint8_t* const data_;
};
VideoCaptureDevice::Client::Buffer CreateStubBuffer(int buffer_id,
size_t mapped_size) {
auto* buffer = new uint8_t[mapped_size];
const int arbitrary_frame_feedback_id = 0;
return VideoCaptureDevice::Client::Buffer(
buffer_id, arbitrary_frame_feedback_id,
std::make_unique<StubBufferHandleProvider>(mapped_size, buffer),
std::make_unique<StubReadWritePermission>(buffer));
}
class ImageCaptureClient : public base::RefCounted<ImageCaptureClient> {
public:
// GMock doesn't support move-only arguments, so we use this forward method.
...@@ -149,43 +82,10 @@ class FakeVideoCaptureDeviceTestBase : public ::testing::Test {
void SetUp() override { EXPECT_CALL(*client_, OnError(_, _, _)).Times(0); }
std::unique_ptr<MockVideoCaptureDeviceClient> CreateClient() {
return MockVideoCaptureDeviceClient::CreateMockClientWithBufferAllocator(
BindToCurrentLoop(base::BindRepeating(
&FakeVideoCaptureDeviceTestBase::OnFrameCaptured,
base::Unretained(this))));
auto result = std::make_unique<NiceMockVideoCaptureDeviceClient>();
ON_CALL(*result, ReserveOutputBuffer(_, _, _, _))
.WillByDefault(
Invoke([](const gfx::Size& dimensions, VideoPixelFormat format, int,
VideoCaptureDevice::Client::Buffer* buffer) {
EXPECT_GT(dimensions.GetArea(), 0);
const VideoCaptureFormat frame_format(dimensions, 0.0, format);
*buffer = CreateStubBuffer(0, frame_format.ImageAllocationSize());
return VideoCaptureDevice::Client::ReserveResult::kSucceeded;
}));
ON_CALL(*result, OnIncomingCapturedData(_, _, _, _, _, _, _, _, _))
.WillByDefault(Invoke(
[this](const uint8_t*, int,
const media::VideoCaptureFormat& frame_format,
const gfx::ColorSpace&, int, bool, base::TimeTicks,
base::TimeDelta, int) { OnFrameCaptured(frame_format); }));
ON_CALL(*result, OnIncomingCapturedGfxBuffer(_, _, _, _, _, _))
.WillByDefault(
Invoke([this](gfx::GpuMemoryBuffer*,
const media::VideoCaptureFormat& frame_format, int,
base::TimeTicks, base::TimeDelta,
int) { OnFrameCaptured(frame_format); }));
ON_CALL(*result, DoOnIncomingCapturedBuffer(_, _, _, _))
.WillByDefault(
Invoke([this](media::VideoCaptureDevice::Client::Buffer&,
const media::VideoCaptureFormat& frame_format,
base::TimeTicks,
base::TimeDelta) { OnFrameCaptured(frame_format); }));
ON_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _, _))
.WillByDefault(Invoke(
[this](media::VideoCaptureDevice::Client::Buffer&,
const media::VideoCaptureFormat& frame_format,
const gfx::ColorSpace&, base::TimeTicks, base::TimeDelta,
gfx::Rect, const media::VideoFrameMetadata&) {
OnFrameCaptured(frame_format);
}));
return result;
}
void OnFrameCaptured(const VideoCaptureFormat& format) {
...@@ -220,9 +120,12 @@ class FakeVideoCaptureDeviceTest
// Tests that a frame is delivered with the expected settings.
// Sweeps through a fixed set of requested/expected resolutions.
TEST_P(FakeVideoCaptureDeviceTest, CaptureUsing) {
if (testing::get<1>(GetParam()) ==
const auto pixel_format = testing::get<0>(GetParam());
const auto delivery_mode = testing::get<1>(GetParam());
const auto frame_rate = testing::get<2>(GetParam());
if (delivery_mode ==
FakeVideoCaptureDevice::DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS &&
testing::get<0>(GetParam()) == PIXEL_FORMAT_MJPEG) {
pixel_format == PIXEL_FORMAT_MJPEG) {
// Unsupported case
return;
}
...@@ -232,8 +135,8 @@ TEST_P(FakeVideoCaptureDeviceTest, CaptureUsing) {
std::unique_ptr<VideoCaptureDevice> device =
FakeVideoCaptureDeviceFactory::CreateDeviceWithDefaultResolutions(
testing::get<0>(GetParam()), testing::get<1>(GetParam()),
testing::get<2>(GetParam()));
pixel_format, delivery_mode, frame_rate,
std::make_unique<FakeGpuMemoryBufferSupport>());
ASSERT_TRUE(device);
// First: Requested, Second: Expected
...@@ -252,13 +155,23 @@ TEST_P(FakeVideoCaptureDeviceTest, CaptureUsing) {
VideoCaptureParams capture_params;
capture_params.requested_format.frame_size = resolution.first;
capture_params.requested_format.frame_rate = testing::get<2>(GetParam());
capture_params.requested_format.frame_rate = frame_rate;
if (delivery_mode ==
FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS) {
capture_params.buffer_type = VideoCaptureBufferType::kGpuMemoryBuffer;
}
device->AllocateAndStart(capture_params, std::move(client));
WaitForCapturedFrame();
EXPECT_EQ(resolution.second.width(), last_format().frame_size.width());
EXPECT_EQ(resolution.second.height(), last_format().frame_size.height());
EXPECT_EQ(last_format().pixel_format, testing::get<0>(GetParam()));
if (delivery_mode ==
FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS) {
// NV12 is the only opaque format backing GpuMemoryBuffer.
EXPECT_EQ(last_format().pixel_format, PIXEL_FORMAT_NV12);
} else {
EXPECT_EQ(last_format().pixel_format, pixel_format);
}
EXPECT_EQ(last_format().frame_rate, testing::get<2>(GetParam()));
device->StopAndDeAllocate();
}
...@@ -271,7 +184,8 @@ INSTANTIATE_TEST_SUITE_P(
Values(PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_MJPEG),
Values(
FakeVideoCaptureDevice::DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS,
FakeVideoCaptureDevice::DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS),
FakeVideoCaptureDevice::DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS,
FakeVideoCaptureDevice::DeliveryMode::USE_GPU_MEMORY_BUFFERS),
Values(20, 29.97, 30, 50, 60)));
TEST_F(FakeVideoCaptureDeviceTest, GetDeviceSupportedFormats) {
...
...@@ -17,8 +17,10 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/gpu_memory_buffer_utils.h"
#include "media/capture/video_capture_types.h"
#include "media/parsers/jpeg_parser.h"
#include "third_party/libyuv/include/libyuv.h"
namespace media {
...@@ -300,8 +302,14 @@ std::unique_ptr<VideoFileParser> FileVideoCaptureDevice::GetVideoFileParser(
return file_parser;
}
FileVideoCaptureDevice::FileVideoCaptureDevice(const base::FilePath& file_path)
: capture_thread_("CaptureThread"), file_path_(file_path) {}
FileVideoCaptureDevice::FileVideoCaptureDevice(
const base::FilePath& file_path,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support)
: capture_thread_("CaptureThread"),
file_path_(file_path),
gmb_support_(gmb_support
? std::move(gmb_support)
: std::make_unique<gpu::GpuMemoryBufferSupport>()) {}
FileVideoCaptureDevice::~FileVideoCaptureDevice() {
DCHECK(thread_checker_.CalledOnValidThread());
...@@ -392,6 +400,9 @@ void FileVideoCaptureDevice::OnAllocateAndStart(
client_ = std::move(client);
if (params.buffer_type == VideoCaptureBufferType::kGpuMemoryBuffer)
video_capture_use_gmb_ = true;
DCHECK(!file_parser_);
file_parser_ = GetVideoFileParser(file_path_, &capture_format_);
if (!file_parser_) {
...@@ -431,12 +442,50 @@ void FileVideoCaptureDevice::OnCaptureTask() {
const base::TimeTicks current_time = base::TimeTicks::Now();
if (first_ref_time_.is_null())
first_ref_time_ = current_time;
// Leave the color space unset for compatibility purposes but this
// information should be retrieved from the container when possible.
client_->OnIncomingCapturedData(frame_ptr, frame_size, capture_format_,
gfx::ColorSpace(), 0 /* clockwise_rotation */,
false /* flip_y */, current_time,
current_time - first_ref_time_);
if (video_capture_use_gmb_) {
const gfx::Size& buffer_size = capture_format_.frame_size;
std::unique_ptr<gfx::GpuMemoryBuffer> gmb;
VideoCaptureDevice::Client::Buffer capture_buffer;
auto reserve_result = AllocateNV12GpuMemoryBuffer(
client_.get(), buffer_size, gmb_support_.get(), &gmb, &capture_buffer);
if (reserve_result !=
VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
client_->OnFrameDropped(
ConvertReservationFailureToFrameDropReason(reserve_result));
return;
}
ScopedNV12GpuMemoryBufferMapping scoped_mapping(std::move(gmb));
const uint8_t* src_y_plane = frame_ptr;
const uint8_t* src_u_plane =
frame_ptr +
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, 0, buffer_size).GetArea();
const uint8_t* src_v_plane =
frame_ptr +
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, 0, buffer_size).GetArea() +
VideoFrame::PlaneSize(PIXEL_FORMAT_I420, 1, buffer_size).GetArea();
libyuv::I420ToNV12(
src_y_plane, buffer_size.width(), src_u_plane, buffer_size.width() / 2,
src_v_plane, buffer_size.width() / 2, scoped_mapping.y_plane(),
scoped_mapping.y_stride(), scoped_mapping.uv_plane(),
scoped_mapping.uv_stride(), buffer_size.width(), buffer_size.height());
VideoCaptureFormat modified_format = capture_format_;
// When GpuMemoryBuffer is used, the frame data is opaque to the CPU for
// most of the time. Currently the only supported underlying format is
// NV12.
modified_format.pixel_format = PIXEL_FORMAT_NV12;
client_->OnIncomingCapturedBuffer(std::move(capture_buffer),
modified_format, current_time,
current_time - first_ref_time_);
} else {
// Leave the color space unset for compatibility purposes but this
// information should be retrieved from the container when possible.
client_->OnIncomingCapturedData(
frame_ptr, frame_size, capture_format_, gfx::ColorSpace(),
0 /* clockwise_rotation */, false /* flip_y */, current_time,
current_time - first_ref_time_);
}
// Process waiting photo callbacks
while (!take_photo_callbacks_.empty()) {
...
...@@ -15,6 +15,7 @@
#include "base/macros.h"
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/capture/video/video_capture_device.h"
namespace media {
...@@ -43,7 +44,9 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
// Constructor of the class, with a fully qualified file path as input, which
// represents the Y4M or MJPEG file to stream repeatedly.
explicit FileVideoCaptureDevice(const base::FilePath& file_path);
explicit FileVideoCaptureDevice(
const base::FilePath& file_path,
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support = nullptr);
// VideoCaptureDevice implementation, class methods.
~FileVideoCaptureDevice() override;
...@@ -88,6 +91,10 @@ class CAPTURE_EXPORT FileVideoCaptureDevice : public VideoCaptureDevice {
// The system time when we receive the first frame.
base::TimeTicks first_ref_time_;
// Whether GpuMemoryBuffer-based video capture buffer is enabled or not.
bool video_capture_use_gmb_ = false;
std::unique_ptr<gpu::GpuMemoryBufferSupport> gmb_support_;
// Guards the below variables from concurrent access between methods running
// on the main thread and |capture_thread_|.
base::Lock lock_;
...
...@@ -14,6 +14,7 @@
#include "media/base/test_data_util.h"
#include "media/capture/video/file_video_capture_device.h"
#include "media/capture/video/mock_video_capture_device_client.h"
#include "media/video/fake_gpu_memory_buffer.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
...@@ -57,17 +58,36 @@ class FileVideoCaptureDeviceTest : public ::testing::Test {
EXPECT_CALL(*client_, OnError(_, _, _)).Times(0);
EXPECT_CALL(*client_, OnStarted());
device_ = std::make_unique<FileVideoCaptureDevice>(
GetTestDataFilePath("bear.mjpeg"));
GetTestDataFilePath("bear.mjpeg"),
std::make_unique<FakeGpuMemoryBufferSupport>());
device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
}
void TearDown() override { device_->StopAndDeAllocate(); }
std::unique_ptr<MockVideoCaptureDeviceClient> CreateClient() {
return MockVideoCaptureDeviceClient::CreateMockClientWithBufferAllocator(
BindToCurrentLoop(
base::BindRepeating(&FileVideoCaptureDeviceTest::OnFrameCaptured,
base::Unretained(this))));
}
void OnFrameCaptured(const VideoCaptureFormat& format) {
last_format_ = format;
run_loop_->Quit();
}
void WaitForCapturedFrame() {
run_loop_.reset(new base::RunLoop());
run_loop_->Run();
}
std::unique_ptr<NiceMockVideoCaptureDeviceClient> client_;
MockImageCaptureClient image_capture_client_;
std::unique_ptr<VideoCaptureDevice> device_;
VideoCaptureFormat last_format_;
base::test::TaskEnvironment task_environment_;
std::unique_ptr<base::RunLoop> run_loop_;
};
TEST_F(FileVideoCaptureDeviceTest, GetPhotoState) {
...@@ -105,4 +125,17 @@ TEST_F(FileVideoCaptureDeviceTest, TakePhoto) {
run_loop.Run();
}
TEST_F(FileVideoCaptureDeviceTest, CaptureWithGpuMemoryBuffer) {
auto client = CreateClient();
VideoCaptureParams params;
params.buffer_type = VideoCaptureBufferType::kGpuMemoryBuffer;
auto device = std::make_unique<FileVideoCaptureDevice>(
GetTestDataFilePath("bear.mjpeg"),
std::make_unique<FakeGpuMemoryBufferSupport>());
device->AllocateAndStart(params, std::move(client));
WaitForCapturedFrame();
EXPECT_EQ(last_format_.pixel_format, PIXEL_FORMAT_NV12);
device->StopAndDeAllocate();
}
} // namespace media
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/gpu_memory_buffer_utils.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
ScopedNV12GpuMemoryBufferMapping::ScopedNV12GpuMemoryBufferMapping(
std::unique_ptr<gfx::GpuMemoryBuffer> gmb)
: gmb_(std::move(gmb)) {
gmb_->Map();
}
ScopedNV12GpuMemoryBufferMapping::~ScopedNV12GpuMemoryBufferMapping() {
gmb_->Unmap();
}
uint8_t* ScopedNV12GpuMemoryBufferMapping::y_plane() {
return static_cast<uint8_t*>(gmb_->memory(0));
}
uint8_t* ScopedNV12GpuMemoryBufferMapping::uv_plane() {
return static_cast<uint8_t*>(gmb_->memory(1));
}
size_t ScopedNV12GpuMemoryBufferMapping::y_stride() {
return gmb_->stride(0);
}
size_t ScopedNV12GpuMemoryBufferMapping::uv_stride() {
return gmb_->stride(1);
}
VideoCaptureDevice::Client::ReserveResult AllocateNV12GpuMemoryBuffer(
VideoCaptureDevice::Client* capture_client,
const gfx::Size& buffer_size,
gpu::GpuMemoryBufferSupport* gmb_support,
std::unique_ptr<gfx::GpuMemoryBuffer>* out_gpu_memory_buffer,
VideoCaptureDevice::Client::Buffer* out_capture_buffer) {
CHECK(out_gpu_memory_buffer);
CHECK(out_capture_buffer);
// When GpuMemoryBuffer is used, the frame data is opaque to the CPU for most
// of the time. Currently the only supported underlying format is NV12.
constexpr VideoPixelFormat kOpaqueVideoFormat = PIXEL_FORMAT_NV12;
constexpr gfx::BufferFormat kOpaqueGfxFormat =
gfx::BufferFormat::YUV_420_BIPLANAR;
const int arbitrary_frame_feedback_id = 0;
const auto reserve_result = capture_client->ReserveOutputBuffer(
buffer_size, kOpaqueVideoFormat, arbitrary_frame_feedback_id,
out_capture_buffer);
if (reserve_result != VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
return reserve_result;
}
*out_gpu_memory_buffer = gmb_support->CreateGpuMemoryBufferImplFromHandle(
out_capture_buffer->handle_provider->GetGpuMemoryBufferHandle(),
buffer_size, kOpaqueGfxFormat,
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE, base::NullCallback());
return reserve_result;
}
} // namespace media
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAPTURE_VIDEO_GPU_MEMORY_BUFFER_UTILS_H_
#define MEDIA_CAPTURE_VIDEO_GPU_MEMORY_BUFFER_UTILS_H_
#include <memory>
#include "media/capture/video/video_capture_device.h"
namespace gfx {
class GpuMemoryBuffer;
} // namespace gfx
namespace gpu {
class GpuMemoryBufferSupport;
} // namespace gpu
// Utility class and function for creating and accessing video capture client
// buffers backed with GpuMemoryBuffer buffers.
namespace media {
class ScopedNV12GpuMemoryBufferMapping {
public:
explicit ScopedNV12GpuMemoryBufferMapping(
std::unique_ptr<gfx::GpuMemoryBuffer> gmb);
~ScopedNV12GpuMemoryBufferMapping();
uint8_t* y_plane();
uint8_t* uv_plane();
size_t y_stride();
size_t uv_stride();
private:
std::unique_ptr<gfx::GpuMemoryBuffer> gmb_;
};
VideoCaptureDevice::Client::ReserveResult AllocateNV12GpuMemoryBuffer(
VideoCaptureDevice::Client* capture_client,
const gfx::Size& buffer_size,
gpu::GpuMemoryBufferSupport* gmb_support,
std::unique_ptr<gfx::GpuMemoryBuffer>* out_gpu_memory_buffer,
VideoCaptureDevice::Client::Buffer* out_capture_buffer);
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_GPU_MEMORY_BUFFER_UTILS_H_
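For context, the intended call pattern for the two helpers above (mirroring the
call sites added elsewhere in this CL; client, gmb_support and buffer_size
stand in for the surrounding capture device's members) looks roughly like:

  std::unique_ptr<gfx::GpuMemoryBuffer> gmb;
  VideoCaptureDevice::Client::Buffer capture_buffer;
  // Reserve an NV12 buffer from the client pool and wrap its handle in a
  // GpuMemoryBuffer.
  auto result = AllocateNV12GpuMemoryBuffer(client, buffer_size, gmb_support,
                                            &gmb, &capture_buffer);
  if (result == VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
    // Map()/Unmap() are handled by the RAII mapping object.
    ScopedNV12GpuMemoryBufferMapping mapping(std::move(gmb));
    memset(mapping.y_plane(), 0, mapping.y_stride() * buffer_size.height());
    memset(mapping.uv_plane(), 0,
           mapping.uv_stride() * (buffer_size.height() / 2));
    // ... fill the planes, then hand the buffer back via
    // client->OnIncomingCapturedBuffer() with the format's pixel_format set
    // to PIXEL_FORMAT_NV12.
  }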
...@@ -4,8 +4,81 @@
#include "media/capture/video/mock_video_capture_device_client.h"
using testing::_;
using testing::Invoke;
namespace media {
namespace {
class StubBufferHandle : public VideoCaptureBufferHandle {
public:
StubBufferHandle(size_t mapped_size, uint8_t* data)
: mapped_size_(mapped_size), data_(data) {}
size_t mapped_size() const override { return mapped_size_; }
uint8_t* data() const override { return data_; }
const uint8_t* const_data() const override { return data_; }
private:
const size_t mapped_size_;
uint8_t* const data_;
};
class StubBufferHandleProvider
: public VideoCaptureDevice::Client::Buffer::HandleProvider {
public:
StubBufferHandleProvider(size_t mapped_size, uint8_t* data)
: mapped_size_(mapped_size), data_(data) {}
~StubBufferHandleProvider() override = default;
base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
NOTREACHED();
return {};
}
mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
NOTREACHED();
return mojo::ScopedSharedBufferHandle();
}
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
override {
return std::make_unique<StubBufferHandle>(mapped_size_, data_);
}
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
return gfx::GpuMemoryBufferHandle();
}
private:
const size_t mapped_size_;
uint8_t* const data_;
};
class StubReadWritePermission
: public VideoCaptureDevice::Client::Buffer::ScopedAccessPermission {
public:
StubReadWritePermission(uint8_t* data) : data_(data) {}
~StubReadWritePermission() override { delete[] data_; }
private:
uint8_t* const data_;
};
VideoCaptureDevice::Client::Buffer CreateStubBuffer(int buffer_id,
size_t mapped_size) {
auto* buffer = new uint8_t[mapped_size];
const int arbitrary_frame_feedback_id = 0;
return VideoCaptureDevice::Client::Buffer(
buffer_id, arbitrary_frame_feedback_id,
std::make_unique<StubBufferHandleProvider>(mapped_size, buffer),
std::make_unique<StubReadWritePermission>(buffer));
}
} // namespace
MockVideoCaptureDeviceClient::MockVideoCaptureDeviceClient() = default;
MockVideoCaptureDeviceClient::~MockVideoCaptureDeviceClient() = default;
...@@ -28,4 +101,55 @@ void MockVideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
timestamp, visible_rect, additional_metadata);
}
// static
std::unique_ptr<MockVideoCaptureDeviceClient>
MockVideoCaptureDeviceClient::CreateMockClientWithBufferAllocator(
FakeFrameCapturedCallback frame_captured_callback) {
auto result = std::make_unique<NiceMockVideoCaptureDeviceClient>();
result->fake_frame_captured_callback_ = std::move(frame_captured_callback);
auto* raw_result_ptr = result.get();
ON_CALL(*result, ReserveOutputBuffer(_, _, _, _))
.WillByDefault(
Invoke([](const gfx::Size& dimensions, VideoPixelFormat format, int,
VideoCaptureDevice::Client::Buffer* buffer) {
EXPECT_GT(dimensions.GetArea(), 0);
const VideoCaptureFormat frame_format(dimensions, 0.0, format);
*buffer = CreateStubBuffer(0, frame_format.ImageAllocationSize());
return VideoCaptureDevice::Client::ReserveResult::kSucceeded;
}));
ON_CALL(*result, OnIncomingCapturedData(_, _, _, _, _, _, _, _, _))
.WillByDefault(
Invoke([raw_result_ptr](const uint8_t*, int,
const media::VideoCaptureFormat& frame_format,
const gfx::ColorSpace&, int, bool,
base::TimeTicks, base::TimeDelta, int) {
raw_result_ptr->fake_frame_captured_callback_.Run(frame_format);
}));
ON_CALL(*result, OnIncomingCapturedGfxBuffer(_, _, _, _, _, _))
.WillByDefault(
Invoke([raw_result_ptr](gfx::GpuMemoryBuffer*,
const media::VideoCaptureFormat& frame_format,
int, base::TimeTicks, base::TimeDelta, int) {
raw_result_ptr->fake_frame_captured_callback_.Run(frame_format);
}));
ON_CALL(*result, DoOnIncomingCapturedBuffer(_, _, _, _))
.WillByDefault(
Invoke([raw_result_ptr](media::VideoCaptureDevice::Client::Buffer&,
const media::VideoCaptureFormat& frame_format,
base::TimeTicks, base::TimeDelta) {
raw_result_ptr->fake_frame_captured_callback_.Run(frame_format);
}));
ON_CALL(*result, DoOnIncomingCapturedBufferExt(_, _, _, _, _, _, _))
.WillByDefault(
Invoke([raw_result_ptr](media::VideoCaptureDevice::Client::Buffer&,
const media::VideoCaptureFormat& frame_format,
const gfx::ColorSpace&, base::TimeTicks,
base::TimeDelta, gfx::Rect,
const media::VideoFrameMetadata&) {
raw_result_ptr->fake_frame_captured_callback_.Run(frame_format);
}));
return result;
}
} // namespace media
...@@ -10,6 +10,9 @@
namespace media {
using FakeFrameCapturedCallback =
base::RepeatingCallback<void(const VideoCaptureFormat&)>;
class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
public:
MockVideoCaptureDeviceClient();
...@@ -68,6 +71,13 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
base::TimeDelta timestamp,
gfx::Rect visible_rect,
const media::VideoFrameMetadata& additional_metadata));
static std::unique_ptr<MockVideoCaptureDeviceClient>
CreateMockClientWithBufferAllocator(
FakeFrameCapturedCallback frame_captured_callback);
protected:
FakeFrameCapturedCallback fake_frame_captured_callback_;
};
using NiceMockVideoCaptureDeviceClient =
...
...@@ -14,6 +14,36 @@
namespace media {
namespace {
class FakeGpuMemoryBufferImpl : public gpu::GpuMemoryBufferImpl {
public:
FakeGpuMemoryBufferImpl(const gfx::Size& size, gfx::BufferFormat format)
: gpu::GpuMemoryBufferImpl(
gfx::GpuMemoryBufferId(),
size,
format,
gpu::GpuMemoryBufferImpl::DestructionCallback()),
fake_gmb_(std::make_unique<media::FakeGpuMemoryBuffer>(size, format)) {}
// gfx::GpuMemoryBuffer implementation
bool Map() override { return fake_gmb_->Map(); }
void* memory(size_t plane) override { return fake_gmb_->memory(plane); }
void Unmap() override { fake_gmb_->Unmap(); }
int stride(size_t plane) const override { return fake_gmb_->stride(plane); }
gfx::GpuMemoryBufferType GetType() const override {
return fake_gmb_->GetType();
}
gfx::GpuMemoryBufferHandle CloneHandle() const override {
return fake_gmb_->CloneHandle();
}
private:
std::unique_ptr<media::FakeGpuMemoryBuffer> fake_gmb_;
};
} // namespace
#if defined(OS_LINUX)
base::ScopedFD GetDummyFD() {
base::ScopedFD fd(open("/dev/zero", O_RDWR));
...@@ -123,4 +153,14 @@ void FakeGpuMemoryBuffer::OnMemoryDump(
uint64_t tracing_process_id,
int importance) const {}
std::unique_ptr<gpu::GpuMemoryBufferImpl>
FakeGpuMemoryBufferSupport::CreateGpuMemoryBufferImplFromHandle(
gfx::GpuMemoryBufferHandle handle,
const gfx::Size& size,
gfx::BufferFormat format,
gfx::BufferUsage usage,
gpu::GpuMemoryBufferImpl::DestructionCallback callback) {
return std::make_unique<FakeGpuMemoryBufferImpl>(size, format);
}
} // namespace media
...@@ -7,6 +7,7 @@
#include <memory>
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
...@@ -43,6 +44,16 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
DISALLOW_IMPLICIT_CONSTRUCTORS(FakeGpuMemoryBuffer);
};
class FakeGpuMemoryBufferSupport : public gpu::GpuMemoryBufferSupport {
public:
std::unique_ptr<gpu::GpuMemoryBufferImpl> CreateGpuMemoryBufferImplFromHandle(
gfx::GpuMemoryBufferHandle handle,
const gfx::Size& size,
gfx::BufferFormat format,
gfx::BufferUsage usage,
gpu::GpuMemoryBufferImpl::DestructionCallback callback) override;
};
} // namespace media
#endif // MEDIA_VIDEO_FAKE_GPU_MEMORY_BUFFER_H_