Commit 752b4919 authored by Fish Lin's avatar Fish Lin Committed by Commit Bot

Make video encode accelerator support DMABUF frame with NV12

First, make mojo support delivering the storage type; this is needed for
v4l2_video_encode_accelerator to read DMABUF correctly, otherwise
it will use kShmem. Second, since a DMABUF frame carries strides, sizes,
and file handles, add the conversion code between VideoFrameData and
VideoFrame. The current implementation only supports 2-plane DMABUF for
NV12; if we support more formats later, some code may need to be modified.

BUG=chromium:963812
TEST=Deliver DMABUF frame from camera and send it into video encoder
through mojo interface, and it can encode correct video out.

Change-Id: Ibf9614b6389e080bc0599226c4a20a410bb9666b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1631160
Commit-Queue: Fish Lin <linfish@google.com>
Auto-Submit: Fish Lin <linfish@google.com>
Reviewed-by: default avatarChrome Cunningham <chcunningham@chromium.org>
Reviewed-by: default avatarDominick Ng <dominickn@chromium.org>
Cr-Commit-Position: refs/heads/master@{#665061}
parent 7ad82ca4
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
#include "base/bind.h" #include "base/bind.h"
#include "base/bind_helpers.h" #include "base/bind_helpers.h"
#include "base/logging.h" #include "base/logging.h"
#include "build/build_config.h"
#include "gpu/ipc/client/gpu_channel_host.h" #include "gpu/ipc/client/gpu_channel_host.h"
#include "media/base/video_frame.h" #include "media/base/video_frame.h"
#include "media/gpu/gpu_video_accelerator_util.h" #include "media/gpu/gpu_video_accelerator_util.h"
...@@ -119,6 +120,21 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame, ...@@ -119,6 +120,21 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
bool force_keyframe) { bool force_keyframe) {
DVLOG(2) << __func__ << " tstamp=" << frame->timestamp(); DVLOG(2) << __func__ << " tstamp=" << frame->timestamp();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(VideoFrame::NumPlanes(frame->format()),
frame->layout().num_planes());
DCHECK(vea_.is_bound());
#if defined(OS_LINUX)
  if (frame->storage_type() == VideoFrame::STORAGE_DMABUFS) {
    DCHECK(frame->HasDmaBufs());
    // Keep a reference alive until the remote end is done with the frame.
    // NOTE: |frame| must be copied into |frame_ref| *before* the call below:
    // function-argument evaluation order is unspecified in C++, so passing
    // both std::move(frame) and frame in the same call is a use-after-move
    // hazard (the BindOnce argument may see an already-moved-from refptr).
    scoped_refptr<VideoFrame> frame_ref = frame;
    vea_->Encode(
        std::move(frame), force_keyframe,
        base::BindOnce(base::DoNothing::Once<scoped_refptr<VideoFrame>>(),
                       std::move(frame_ref)));
    return;
  }
#endif
DCHECK_EQ(PIXEL_FORMAT_I420, frame->format()); DCHECK_EQ(PIXEL_FORMAT_I420, frame->format());
DCHECK_EQ(VideoFrame::STORAGE_SHMEM, frame->storage_type()); DCHECK_EQ(VideoFrame::STORAGE_SHMEM, frame->storage_type());
DCHECK(frame->shared_memory_handle().IsValid()); DCHECK(frame->shared_memory_handle().IsValid());
...@@ -154,7 +170,6 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame, ...@@ -154,7 +170,6 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
// Encode() is synchronous: clients will assume full ownership of |frame| when // Encode() is synchronous: clients will assume full ownership of |frame| when
// this gets destroyed and probably recycle its shared_memory_handle(): keep // this gets destroyed and probably recycle its shared_memory_handle(): keep
// the former alive until the remote end is actually finished. // the former alive until the remote end is actually finished.
DCHECK(vea_.is_bound());
vea_->Encode( vea_->Encode(
std::move(mojo_frame), force_keyframe, std::move(mojo_frame), force_keyframe,
base::BindOnce(base::DoNothing::Once<scoped_refptr<VideoFrame>>(), base::BindOnce(base::DoNothing::Once<scoped_refptr<VideoFrame>>(),
......
...@@ -272,6 +272,7 @@ struct VideoFrame { ...@@ -272,6 +272,7 @@ struct VideoFrame {
union VideoFrameData { union VideoFrameData {
EosVideoFrameData eos_data; EosVideoFrameData eos_data;
SharedBufferVideoFrameData shared_buffer_data; SharedBufferVideoFrameData shared_buffer_data;
DmabufVideoFrameData dmabuf_data;
MailboxVideoFrameData mailbox_data; MailboxVideoFrameData mailbox_data;
}; };
...@@ -295,6 +296,12 @@ struct SharedBufferVideoFrameData { ...@@ -295,6 +296,12 @@ struct SharedBufferVideoFrameData {
uint64 v_offset; uint64 v_offset;
}; };
// This defines video frame data stored in dmabuf.
struct DmabufVideoFrameData {
// One dmabuf file-descriptor handle per plane, in plane order.
// Size depends on media::VideoFrame::NumPlanes with frame format.
array<handle> dmabuf_fds;
};
// This defines video frame data stored in texture mailboxes. // This defines video frame data stored in texture mailboxes.
struct MailboxVideoFrameData { struct MailboxVideoFrameData {
// Size must be kept in sync with media::VideoFrame::kMaxPlanes. // Size must be kept in sync with media::VideoFrame::kMaxPlanes.
......
...@@ -51,6 +51,12 @@ struct VideoEncodeAcceleratorConfig { ...@@ -51,6 +51,12 @@ struct VideoEncodeAcceleratorConfig {
kDisplay kDisplay
}; };
// Mirrors media::VideoEncodeAccelerator::Config::StorageType; keep the two
// enums in sync (converted by the EnumTraits in the mojom traits files).
enum StorageType {
// Input frames arrive in shared memory.
kShmem,
// Input frames arrive as DMABUF fds (used by the OS_LINUX encode path).
kDmabuf
};
VideoPixelFormat input_format; VideoPixelFormat input_format;
gfx.mojom.Size input_visible_size; gfx.mojom.Size input_visible_size;
VideoCodecProfile output_profile; VideoCodecProfile output_profile;
...@@ -61,6 +67,8 @@ struct VideoEncodeAcceleratorConfig { ...@@ -61,6 +67,8 @@ struct VideoEncodeAcceleratorConfig {
bool has_gop_length; // Whether or not config has group of picture length bool has_gop_length; // Whether or not config has group of picture length
uint8 h264_output_level; uint8 h264_output_level;
bool has_h264_output_level; // Whether or not config has H264 output level bool has_h264_output_level; // Whether or not config has H264 output level
StorageType storage_type;
bool has_storage_type; // Whether or not config has storage type config
ContentType content_type; ContentType content_type;
}; };
......
...@@ -116,6 +116,38 @@ bool StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata>::Read( ...@@ -116,6 +116,38 @@ bool StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata>::Read(
return true; return true;
} }
// static
media::mojom::VideoEncodeAcceleratorConfig::StorageType
EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::StorageType,
           media::VideoEncodeAccelerator::Config::StorageType>::
    ToMojom(media::VideoEncodeAccelerator::Config::StorageType input) {
  // Local aliases keep the exhaustive switch below readable.
  using MediaStorageType = media::VideoEncodeAccelerator::Config::StorageType;
  using MojomStorageType =
      media::mojom::VideoEncodeAcceleratorConfig::StorageType;
  switch (input) {
    case MediaStorageType::kShmem:
      return MojomStorageType::kShmem;
    case MediaStorageType::kDmabuf:
      return MojomStorageType::kDmabuf;
  }
  NOTREACHED();
  return MojomStorageType::kShmem;
}
// static
bool EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::StorageType,
                media::VideoEncodeAccelerator::Config::StorageType>::
    FromMojom(media::mojom::VideoEncodeAcceleratorConfig::StorageType input,
              media::VideoEncodeAccelerator::Config::StorageType* output) {
  // Local aliases keep the exhaustive switch below readable.
  using MediaStorageType = media::VideoEncodeAccelerator::Config::StorageType;
  using MojomStorageType =
      media::mojom::VideoEncodeAcceleratorConfig::StorageType;
  switch (input) {
    case MojomStorageType::kDmabuf:
      *output = MediaStorageType::kDmabuf;
      return true;
    case MojomStorageType::kShmem:
      *output = MediaStorageType::kShmem;
      return true;
  }
  // Unrecognized wire value: reject the message.
  NOTREACHED();
  return false;
}
// static // static
media::mojom::VideoEncodeAcceleratorConfig::ContentType media::mojom::VideoEncodeAcceleratorConfig::ContentType
EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::ContentType, EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::ContentType,
...@@ -176,13 +208,20 @@ bool StructTraits<media::mojom::VideoEncodeAcceleratorConfigDataView, ...@@ -176,13 +208,20 @@ bool StructTraits<media::mojom::VideoEncodeAcceleratorConfigDataView,
if (input.has_h264_output_level()) if (input.has_h264_output_level())
h264_output_level = input.h264_output_level(); h264_output_level = input.h264_output_level();
base::Optional<media::VideoEncodeAccelerator::Config::StorageType>
storage_type;
if (input.has_storage_type()) {
if (!input.ReadStorageType(&storage_type))
return false;
}
media::VideoEncodeAccelerator::Config::ContentType content_type; media::VideoEncodeAccelerator::Config::ContentType content_type;
if (!input.ReadContentType(&content_type)) if (!input.ReadContentType(&content_type))
return false; return false;
*output = media::VideoEncodeAccelerator::Config( *output = media::VideoEncodeAccelerator::Config(
input_format, input_visible_size, output_profile, input.initial_bitrate(), input_format, input_visible_size, output_profile, input.initial_bitrate(),
initial_framerate, gop_length, h264_output_level, base::nullopt, initial_framerate, gop_length, h264_output_level, storage_type,
content_type); content_type);
return true; return true;
} }
......
...@@ -75,6 +75,17 @@ class StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata> { ...@@ -75,6 +75,17 @@ class StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata> {
media::Vp8Metadata* out_metadata); media::Vp8Metadata* out_metadata);
}; };
template <>
struct EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::StorageType,
media::VideoEncodeAccelerator::Config::StorageType> {
static media::mojom::VideoEncodeAcceleratorConfig::StorageType ToMojom(
media::VideoEncodeAccelerator::Config::StorageType input);
static bool FromMojom(
media::mojom::VideoEncodeAcceleratorConfig::StorageType,
media::VideoEncodeAccelerator::Config::StorageType* output);
};
template <> template <>
struct EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::ContentType, struct EnumTraits<media::mojom::VideoEncodeAcceleratorConfig::ContentType,
media::VideoEncodeAccelerator::Config::ContentType> { media::VideoEncodeAccelerator::Config::ContentType> {
...@@ -139,6 +150,17 @@ struct StructTraits<media::mojom::VideoEncodeAcceleratorConfigDataView, ...@@ -139,6 +150,17 @@ struct StructTraits<media::mojom::VideoEncodeAcceleratorConfigDataView,
return input.h264_output_level.has_value(); return input.h264_output_level.has_value();
} }
// Serializes Config::storage_type. The optional defaults to kShmem here;
// has_storage_type() below tells the receiver whether it was actually set.
static media::VideoEncodeAccelerator::Config::StorageType storage_type(
const media::VideoEncodeAccelerator::Config& input) {
return input.storage_type.value_or(
media::VideoEncodeAccelerator::Config::StorageType::kShmem);
}
// Whether Config::storage_type was explicitly set (distinguishes a real
// kShmem value from the serialization default above).
static bool has_storage_type(
const media::VideoEncodeAccelerator::Config& input) {
return input.storage_type.has_value();
}
static media::VideoEncodeAccelerator::Config::ContentType content_type( static media::VideoEncodeAccelerator::Config::ContentType content_type(
const media::VideoEncodeAccelerator::Config& input) { const media::VideoEncodeAccelerator::Config& input) {
return input.content_type; return input.content_type;
......
...@@ -8,9 +8,12 @@ ...@@ -8,9 +8,12 @@
#include <vector> #include <vector>
#include "base/logging.h" #include "base/logging.h"
#include "build/build_config.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h" #include "media/mojo/common/mojo_shared_buffer_video_frame.h"
#include "mojo/public/cpp/base/time_mojom_traits.h" #include "mojo/public/cpp/base/time_mojom_traits.h"
#include "mojo/public/cpp/base/values_mojom_traits.h" #include "mojo/public/cpp/base/values_mojom_traits.h"
#include "mojo/public/cpp/system/handle.h"
#include "mojo/public/cpp/system/platform_handle.h"
#include "ui/gfx/mojo/color_space_mojom_traits.h" #include "ui/gfx/mojo/color_space_mojom_traits.h"
namespace mojo { namespace mojo {
...@@ -46,6 +49,23 @@ media::mojom::VideoFrameDataPtr MakeVideoFrameData( ...@@ -46,6 +49,23 @@ media::mojom::VideoFrameDataPtr MakeVideoFrameData(
mojo_frame->PlaneOffset(media::VideoFrame::kVPlane))); mojo_frame->PlaneOffset(media::VideoFrame::kVPlane)));
} }
#if defined(OS_LINUX)
if (input->storage_type() == media::VideoFrame::STORAGE_DMABUFS) {
std::vector<mojo::ScopedHandle> dmabuf_fds;
const size_t num_planes = media::VideoFrame::NumPlanes(input->format());
dmabuf_fds.reserve(num_planes);
for (size_t i = 0; i < num_planes; i++) {
const int dmabuf_fd = HANDLE_EINTR(dup(input->DmabufFds()[i].get()));
dmabuf_fds.emplace_back(mojo::WrapPlatformFile(dmabuf_fd));
DCHECK(dmabuf_fds.back().is_valid());
}
return media::mojom::VideoFrameData::NewDmabufData(
media::mojom::DmabufVideoFrameData::New(std::move(dmabuf_fds)));
}
#endif
if (input->HasTextures()) { if (input->HasTextures()) {
std::vector<gpu::MailboxHolder> mailbox_holder( std::vector<gpu::MailboxHolder> mailbox_holder(
media::VideoFrame::kMaxPlanes); media::VideoFrame::kMaxPlanes);
...@@ -122,6 +142,46 @@ bool StructTraits<media::mojom::VideoFrameDataView, ...@@ -122,6 +142,46 @@ bool StructTraits<media::mojom::VideoFrameDataView,
shared_buffer_data.u_offset(), shared_buffer_data.v_offset(), shared_buffer_data.u_offset(), shared_buffer_data.v_offset(),
shared_buffer_data.y_stride(), shared_buffer_data.u_stride(), shared_buffer_data.y_stride(), shared_buffer_data.u_stride(),
shared_buffer_data.v_stride(), timestamp); shared_buffer_data.v_stride(), timestamp);
#if defined(OS_LINUX)
} else if (data.is_dmabuf_data()) {
media::mojom::DmabufVideoFrameDataDataView dmabuf_data;
data.GetDmabufDataDataView(&dmabuf_data);
std::vector<mojo::ScopedHandle> dmabuf_fds_data;
if (!dmabuf_data.ReadDmabufFds(&dmabuf_fds_data))
return false;
const size_t num_planes = media::VideoFrame::NumPlanes(format);
std::vector<int> strides =
media::VideoFrame::ComputeStrides(format, coded_size);
std::vector<size_t> buffer_sizes;
buffer_sizes.reserve(num_planes);
for (size_t i = 0; i < num_planes; i++) {
buffer_sizes.emplace_back(static_cast<size_t>(
media::VideoFrame::PlaneSize(format, i, coded_size).GetArea()));
}
DCHECK_EQ(num_planes, dmabuf_fds_data.size());
DCHECK_EQ(num_planes, strides.size());
DCHECK_EQ(num_planes, buffer_sizes.size());
auto layout = media::VideoFrameLayout::CreateWithStrides(
format, coded_size, std::move(strides), std::move(buffer_sizes));
if (!layout)
return false;
std::vector<base::ScopedFD> dmabuf_fds;
dmabuf_fds.reserve(num_planes);
for (size_t i = 0; i < num_planes; i++) {
base::PlatformFile platform_file;
mojo::UnwrapPlatformFile(std::move(dmabuf_fds_data[i]), &platform_file);
dmabuf_fds.emplace_back(platform_file);
DCHECK(dmabuf_fds.back().is_valid());
}
frame = media::VideoFrame::WrapExternalDmabufs(
*layout, visible_rect, natural_size, std::move(dmabuf_fds), timestamp);
#endif
} else if (data.is_mailbox_data()) { } else if (data.is_mailbox_data()) {
media::mojom::MailboxVideoFrameDataDataView mailbox_data; media::mojom::MailboxVideoFrameDataDataView mailbox_data;
data.GetMailboxDataDataView(&mailbox_data); data.GetMailboxDataDataView(&mailbox_data);
......
...@@ -20,6 +20,11 @@ ...@@ -20,6 +20,11 @@
#include "ui/gfx/geometry/rect.h" #include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h" #include "ui/gfx/geometry/size.h"
#if defined(OS_LINUX)
#include <fcntl.h>
#include <sys/stat.h>
#endif
namespace media { namespace media {
namespace { namespace {
...@@ -93,6 +98,40 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) { ...@@ -93,6 +98,40 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
EXPECT_TRUE(mojo_shared_buffer_frame->Handle().is_valid()); EXPECT_TRUE(mojo_shared_buffer_frame->Handle().is_valid());
} }
#if defined(OS_LINUX)
// Round-trips an NV12 DMABUF-backed frame through the mojom VideoFrame
// serialization and verifies format, geometry, timestamp and storage type.
TEST_F(VideoFrameStructTraitsTest, DmabufVideoFrame) {
  const size_t num_planes = media::VideoFrame::NumPlanes(PIXEL_FORMAT_NV12);
  std::vector<int> strides = {1280, 1280};
  std::vector<size_t> buffer_sizes = {1280 * 720, 1280 * 720 / 2};
  auto layout = media::VideoFrameLayout::CreateWithStrides(
      PIXEL_FORMAT_NV12, gfx::Size(1280, 720), std::move(strides),
      std::move(buffer_sizes));
  // CreateWithStrides() returns an empty optional on invalid input; check it
  // before dereferencing |*layout| below.
  ASSERT_TRUE(layout.has_value());

  // A real DMABUF needs a device to create; plain file fds stand in, since
  // this test only exercises moving fds across the mojo boundary.
  std::vector<int> fake_fds = {open("/dev/null", O_RDWR),
                               open("/dev/zero", O_RDWR)};
  std::vector<base::ScopedFD> dmabuf_fds;
  dmabuf_fds.reserve(num_planes);
  for (size_t i = 0; i < num_planes; i++) {
    // open() can fail; don't wrap -1 in a ScopedFD.
    ASSERT_GE(fake_fds[i], 0);
    dmabuf_fds.emplace_back(fake_fds[i]);
  }

  scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalDmabufs(
      *layout, gfx::Rect(0, 0, 1280, 720), gfx::Size(1280, 720),
      std::move(dmabuf_fds), base::TimeDelta::FromSeconds(100));
  ASSERT_TRUE(RoundTrip(&frame));
  ASSERT_TRUE(frame);
  EXPECT_FALSE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
  EXPECT_EQ(frame->format(), PIXEL_FORMAT_NV12);
  EXPECT_EQ(frame->coded_size(), gfx::Size(1280, 720));
  EXPECT_EQ(frame->visible_rect(), gfx::Rect(0, 0, 1280, 720));
  EXPECT_EQ(frame->natural_size(), gfx::Size(1280, 720));
  EXPECT_EQ(frame->timestamp(), base::TimeDelta::FromSeconds(100));
  ASSERT_TRUE(frame->HasDmaBufs());
  ASSERT_EQ(frame->storage_type(), VideoFrame::STORAGE_DMABUFS);
}
#endif
TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) { TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
gpu::Mailbox mailbox = gpu::Mailbox::Generate(); gpu::Mailbox mailbox = gpu::Mailbox::Generate();
gpu::MailboxHolder mailbox_holder[VideoFrame::kMaxPlanes]; gpu::MailboxHolder mailbox_holder[VideoFrame::kMaxPlanes];
......
...@@ -50,8 +50,9 @@ void MojoVideoEncodeAcceleratorService::Initialize( ...@@ -50,8 +50,9 @@ void MojoVideoEncodeAcceleratorService::Initialize(
DVLOG(1) << __func__ << " " << config.AsHumanReadableString(); DVLOG(1) << __func__ << " " << config.AsHumanReadableString();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!encoder_); DCHECK(!encoder_);
DCHECK_EQ(PIXEL_FORMAT_I420, config.input_format) DCHECK(config.input_format == PIXEL_FORMAT_I420 ||
<< "Only I420 format supported"; config.input_format == PIXEL_FORMAT_NV12)
<< "Only I420 or NV12 format supported";
if (!client) { if (!client) {
DLOG(ERROR) << __func__ << "null |client|"; DLOG(ERROR) << __func__ << "null |client|";
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment