Commit cf38f978 authored by Dale Curtis, committed by Chromium LUCI CQ

Implement WebCodecs support for ARGB, ABGR, xRGB, and xBGR.

This allows zero-copy construction of RGB frames from CPU-backed
ImageBitmaps and removes a copy from YUV-backed ImageBitmaps. Since these
formats are now supported, planar access is enabled.

Encoding support is enabled by teaching the utility method used by
the encoders how to scale and convert RGB frames to I420 and NV12.
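
For readers unfamiliar with the conversion path, the sketch below condenses
what the ConvertAndScaleFrame() change does. It is illustrative only and not
part of this CL: ArgbToScaledI420 is a made-up helper operating on raw
buffers rather than media::VideoFrame, and kFilterBox stands in for the
utility's default filtering. The libyuv entry points (ARGBScale, ARGBToI420)
and the BT.601 caveat are the ones the change relies on.

// Illustrative sketch only -- not part of this CL.
#include <cstdint>
#include <vector>

#include "third_party/libyuv/include/libyuv.h"  // umbrella header, as in video_util.cc

bool ArgbToScaledI420(const uint8_t* src_argb, int src_stride, int src_w,
                      int src_h, int dst_w, int dst_h, uint8_t* dst_y,
                      int y_stride, uint8_t* dst_u, int u_stride,
                      uint8_t* dst_v, int v_stride) {
  const uint8_t* argb = src_argb;
  int argb_stride = src_stride;
  std::vector<uint8_t> tmp;  // holds the scaled ARGB image when resizing
  if (src_w != dst_w || src_h != dst_h) {
    tmp.resize(static_cast<size_t>(dst_w) * dst_h * 4);
    if (libyuv::ARGBScale(src_argb, src_stride, src_w, src_h, tmp.data(),
                          dst_w * 4, dst_w, dst_h, libyuv::kFilterBox)) {
      return false;
    }
    argb = tmp.data();
    argb_stride = dst_w * 4;
  }
  // libyuv's RGB-to-YUV routines always produce BT.601 output, which is why
  // the CL tags the destination frame with gfx::ColorSpace::CreateREC601().
  return libyuv::ARGBToI420(argb, argb_stride, dst_y, y_stride, dst_u,
                            u_stride, dst_v, v_stride, dst_w, dst_h) == 0;
}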

Fixed: 1161012
Test: https://storage.googleapis.com/dalecurtis/canvas-test.html
Change-Id: I3944a4903b8defc5592ba65d4536ec4d0750c861
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2601678
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Auto-Submit: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#840760}
parent e8100cd3
@@ -507,6 +507,71 @@ Status ConvertAndScaleFrame(const VideoFrame& src_frame,
   if (!src_frame.IsMappable() || !dst_frame.IsMappable())
     return Status(StatusCode::kUnsupportedFrameFormatError);
+  if ((dst_frame.format() == PIXEL_FORMAT_I420 ||
+       dst_frame.format() == PIXEL_FORMAT_NV12) &&
+      (src_frame.format() == PIXEL_FORMAT_XBGR ||
+       src_frame.format() == PIXEL_FORMAT_XRGB ||
+       src_frame.format() == PIXEL_FORMAT_ABGR ||
+       src_frame.format() == PIXEL_FORMAT_ARGB)) {
+    // libyuv's RGB to YUV methods always output BT.601.
+    dst_frame.set_color_space(gfx::ColorSpace::CreateREC601());
+    size_t src_stride = src_frame.stride(VideoFrame::kARGBPlane);
+    const uint8_t* src_data = src_frame.visible_data(VideoFrame::kARGBPlane);
+    if (src_frame.visible_rect() != dst_frame.visible_rect()) {
+      size_t tmp_buffer_size = VideoFrame::AllocationSize(
+          src_frame.format(), dst_frame.coded_size());
+      if (tmp_buf.size() < tmp_buffer_size)
+        tmp_buf.resize(tmp_buffer_size);
+      size_t stride =
+          VideoFrame::RowBytes(VideoFrame::kARGBPlane, src_frame.format(),
+                               dst_frame.visible_rect().width());
+      int error = libyuv::ARGBScale(
+          src_data, src_stride, src_frame.visible_rect().width(),
+          src_frame.visible_rect().height(), tmp_buf.data(), stride,
+          dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
+          kDefaultFiltering);
+      if (error)
+        return Status(StatusCode::kInvalidArgument);
+      src_data = tmp_buf.data();
+      src_stride = stride;
+    }
+    if (dst_frame.format() == PIXEL_FORMAT_I420) {
+      auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
+                         src_frame.format() == PIXEL_FORMAT_ABGR)
+                            ? libyuv::ABGRToI420
+                            : libyuv::ARGBToI420;
+      int error = convert_fn(src_data, src_stride,
+                             dst_frame.visible_data(media::VideoFrame::kYPlane),
+                             dst_frame.stride(media::VideoFrame::kYPlane),
+                             dst_frame.visible_data(media::VideoFrame::kUPlane),
+                             dst_frame.stride(media::VideoFrame::kUPlane),
+                             dst_frame.visible_data(media::VideoFrame::kVPlane),
+                             dst_frame.stride(media::VideoFrame::kVPlane),
+                             dst_frame.visible_rect().width(),
+                             dst_frame.visible_rect().height());
+      return error ? Status(StatusCode::kInvalidArgument) : Status();
+    }
+    auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
+                       src_frame.format() == PIXEL_FORMAT_ABGR)
+                          ? libyuv::ABGRToNV12
+                          : libyuv::ARGBToNV12;
+    int error = convert_fn(src_data, src_stride,
+                           dst_frame.visible_data(media::VideoFrame::kYPlane),
+                           dst_frame.stride(media::VideoFrame::kYPlane),
+                           dst_frame.visible_data(media::VideoFrame::kUVPlane),
+                           dst_frame.stride(media::VideoFrame::kUVPlane),
+                           dst_frame.visible_rect().width(),
+                           dst_frame.visible_rect().height());
+    return error ? Status(StatusCode::kInvalidArgument) : Status();
+  }
+  // Converting between YUV formats doesn't change the color space.
+  dst_frame.set_color_space(src_frame.ColorSpace());
   // Both frames are I420, only scaling is required.
   if (dst_frame.format() == PIXEL_FORMAT_I420 &&
       src_frame.format() == PIXEL_FORMAT_I420) {
......
@@ -386,7 +386,8 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
   uint8_t* pixels = static_cast<uint8_t*>(rgb_pixels) +
                     row_bytes * chunk_start * rows_per_chunk;
-  if (format == PIXEL_FORMAT_ARGB) {
+  if (format == PIXEL_FORMAT_ARGB || format == PIXEL_FORMAT_XRGB ||
+      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_XBGR) {
     DCHECK_LE(width, static_cast<int>(row_bytes));
     const uint8_t* data = plane_meta[VideoFrame::kARGBPlane].data;
     for (size_t i = 0; i < rows; i++) {

@@ -928,8 +929,11 @@ void PaintCanvasVideoRenderer::Paint(
   // frame has an unexpected format.
   if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
       !(media::IsYuvPlanar(video_frame->format()) ||
-        video_frame->format() == media::PIXEL_FORMAT_Y16 ||
-        video_frame->format() == media::PIXEL_FORMAT_ARGB ||
+        video_frame->format() == PIXEL_FORMAT_Y16 ||
+        video_frame->format() == PIXEL_FORMAT_ARGB ||
+        video_frame->format() == PIXEL_FORMAT_XRGB ||
+        video_frame->format() == PIXEL_FORMAT_ABGR ||
+        video_frame->format() == PIXEL_FORMAT_XBGR ||
         video_frame->HasTextures())) {
     cc::PaintFlags black_with_alpha_flags;
     black_with_alpha_flags.setAlpha(flags.getAlpha());
......
@@ -153,8 +153,12 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
                "No frame provided for encoding."));
     return;
   }
-  const bool supported_format = (frame->format() == PIXEL_FORMAT_NV12) ||
-                                (frame->format() == PIXEL_FORMAT_I420);
+  const bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
+                                frame->format() == PIXEL_FORMAT_I420 ||
+                                frame->format() == PIXEL_FORMAT_XBGR ||
+                                frame->format() == PIXEL_FORMAT_XRGB ||
+                                frame->format() == PIXEL_FORMAT_ABGR ||
+                                frame->format() == PIXEL_FORMAT_ARGB;
   if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
       !supported_format) {
     status =

@@ -175,7 +179,7 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
     }
   }
-  if (frame->format() == PIXEL_FORMAT_NV12) {
+  if (frame->format() != PIXEL_FORMAT_I420) {
     // OpenH264 can resize frame automatically, but since we're converting
     // pixel format anyway we can do resize as well.
     auto i420_frame = frame_pool_.CreateFrame(
......
@@ -41,9 +41,20 @@ VideoEncodeAccelerator::Config SetUpVeaConfig(
       opts.bitrate.value_or(opts.frame_size.width() * opts.frame_size.height() *
                             kVEADefaultBitratePerPixel));
+  const bool is_rgb =
+      format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
+      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
+  // Override the provided format if incoming frames are RGB -- they'll be
+  // converted to I420 or NV12 depending on the VEA configuration.
+  if (is_rgb)
+    config.input_format = PIXEL_FORMAT_I420;
 #if defined(OS_LINUX) || defined(OS_CHROMEOS)
   if (storage_type == VideoFrame::STORAGE_DMABUFS ||
       storage_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
+    if (is_rgb)
+      config.input_format = PIXEL_FORMAT_NV12;
     config.storage_type = VideoEncodeAccelerator::Config::StorageType::kDmabuf;
   }
 #endif
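
A condensed restatement of the format override above (illustrative only, not
from this CL; PickVeaInputFormat is a made-up helper): YUV input keeps its
format, while RGB input is mapped to NV12 on the dmabuf/GPU-memory-buffer path
and to I420 otherwise.

// Illustrative only -- a made-up helper that condenses the override above.
#include "media/base/video_types.h"

namespace media {
VideoPixelFormat PickVeaInputFormat(VideoPixelFormat format,
                                    bool dmabuf_storage) {
  const bool is_rgb =
      format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
  if (!is_rgb)
    return format;  // YUV input reaches the accelerator unchanged.
  // RGB frames are converted before encoding: to NV12 on the dmabuf/GPU-buffer
  // path (Linux/ChromeOS), to I420 on the shared-memory path.
  return dmabuf_storage ? PIXEL_FORMAT_NV12 : PIXEL_FORMAT_I420;
}
}  // namespace media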
@@ -223,9 +234,13 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
   // We use the first frame to set up the VEA config so that we can ensure that
   // zero copy hardware encoding from the camera can be used.
   const auto& first_frame = pending_encodes_.front()->frame;
-  auto format = first_frame->format();
-  if (format != PIXEL_FORMAT_I420 && format != PIXEL_FORMAT_NV12) {
+  const auto format = first_frame->format();
+  const bool is_rgb =
+      format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
+      format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
+  const bool supported_format =
+      format == PIXEL_FORMAT_NV12 || format == PIXEL_FORMAT_I420 || is_rgb;
+  if (!supported_format) {
     auto status =
         Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
             .WithData("frame", first_frame->AsHumanReadableString());
@@ -259,7 +274,7 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
   }
   state_ = State::kInitializing;
-  format_ = format;
+  format_ = vea_config.input_format;
 }

 void VideoEncodeAcceleratorAdapter::Encode(scoped_refptr<VideoFrame> frame,
@@ -610,8 +625,8 @@ T VideoEncodeAcceleratorAdapter::WrapCallback(T cb) {
   return BindToLoop(callback_task_runner_.get(), std::move(cb));
 }

-// Copy a frame into a shared mem buffer and resize it as the same time.
-// Input frames can I420 or NV12, they'll be converted to I420 if needed.
+// Copy a frame into a shared mem buffer and resize it at the same time. Input
+// frames can be I420, NV12, or RGB -- they'll be converted to I420 if needed.
 StatusOr<scoped_refptr<VideoFrame>>
 VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
     const gfx::Size& size,
@@ -646,8 +661,8 @@ VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
   return shared_frame;
 }

-// Copy a frame into a GPU buffer and resize it as the same time.
-// Input frames can I420 or NV12, they'll be converted to NV12 if needed.
+// Copy a frame into a GPU buffer and resize it at the same time. Input frames
+// can be I420, NV12, or RGB -- they'll be converted to NV12 if needed.
 StatusOr<scoped_refptr<VideoFrame>>
 VideoEncodeAcceleratorAdapter::PrepareGpuFrame(
     const gfx::Size& size,
......
@@ -112,6 +112,23 @@ class VideoEncodeAcceleratorAdapterTest
     return frame;
   }

+  scoped_refptr<VideoFrame> CreateGreenCpuFrameARGB(gfx::Size size,
+                                                    base::TimeDelta timestamp) {
+    auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_XRGB, size,
+                                         gfx::Rect(size), size, timestamp);
+    // Green XRGB frame (R:0x3B, G:0xD9, B:0x24)
+    libyuv::ARGBRect(frame->data(VideoFrame::kARGBPlane),
+                     frame->stride(VideoFrame::kARGBPlane),
+                     0,                               // left
+                     0,                               // top
+                     frame->visible_rect().width(),   // right
+                     frame->visible_rect().height(),  // bottom
+                     0x24D93B00);                     // V color
+    return frame;
+  }
+
   scoped_refptr<VideoFrame> CreateGreenFrame(gfx::Size size,
                                              VideoPixelFormat format,
                                              base::TimeDelta timestamp) {

@@ -120,6 +137,8 @@ class VideoEncodeAcceleratorAdapterTest
         return CreateGreenCpuFrame(size, timestamp);
       case PIXEL_FORMAT_NV12:
         return CreateGreenGpuFrame(size, timestamp);
+      case PIXEL_FORMAT_XRGB:
+        return CreateGreenCpuFrameARGB(size, timestamp);
       default:
         EXPECT_TRUE(false) << "not supported pixel format";
         return nullptr;
@@ -237,7 +256,8 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, TwoFramesResize) {
   vea()->SetEncodingCallback(base::BindLambdaForTesting(
       [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
 #if defined(OS_LINUX) || defined(OS_CHROMEOS)
-        EXPECT_EQ(frame->format(), pixel_format);
+        EXPECT_EQ(frame->format(),
+                  IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
 #else
         // Everywhere except on Linux resize switches frame into CPU mode.
         EXPECT_EQ(frame->format(), PIXEL_FORMAT_I420);

@@ -280,7 +300,8 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
   vea()->SetEncodingCallback(base::BindLambdaForTesting(
       [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
-        EXPECT_EQ(frame->format(), pixel_format);
+        EXPECT_EQ(frame->format(),
+                  IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
         EXPECT_EQ(frame->coded_size(), options.frame_size);
         return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
       }));
@@ -295,13 +316,17 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
       size = small_size;
     else
       size = same_size;
-    auto create_func =
-        (frame_index & 4)
-            ? &VideoEncodeAcceleratorAdapterTest::CreateGreenGpuFrame
-            : &VideoEncodeAcceleratorAdapterTest::CreateGreenCpuFrame;
+    // Every 4 frames switch between the 3 supported formats.
+    const int rem = frame_index % 12;
+    auto format = PIXEL_FORMAT_XRGB;
+    if (rem < 4)
+      format = PIXEL_FORMAT_I420;
+    else if (rem < 8)
+      format = PIXEL_FORMAT_NV12;
     bool key = frame_index % 9 == 0;
-    auto frame = (this->*create_func)(
-        size, base::TimeDelta::FromMilliseconds(frame_index));
+    auto frame = CreateGreenFrame(
+        size, format, base::TimeDelta::FromMilliseconds(frame_index));
     adapter()->Encode(frame, key, ValidatingStatusCB());
   }

@@ -312,6 +337,7 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
 INSTANTIATE_TEST_SUITE_P(VideoEncodeAcceleratorAdapterTest,
                          VideoEncodeAcceleratorAdapterTest,
                          ::testing::Values(PIXEL_FORMAT_I420,
-                                           PIXEL_FORMAT_NV12));
+                                           PIXEL_FORMAT_NV12,
+                                           PIXEL_FORMAT_XRGB));

 }  // namespace media
@@ -251,8 +251,12 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
                "No frame provided for encoding."));
     return;
   }
-  bool supported_format = (frame->format() == PIXEL_FORMAT_NV12) ||
-                          (frame->format() == PIXEL_FORMAT_I420);
+  bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
+                          frame->format() == PIXEL_FORMAT_I420 ||
+                          frame->format() == PIXEL_FORMAT_XBGR ||
+                          frame->format() == PIXEL_FORMAT_XRGB ||
+                          frame->format() == PIXEL_FORMAT_ABGR ||
+                          frame->format() == PIXEL_FORMAT_ARGB;
   if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
       !supported_format) {
     status =

@@ -273,10 +277,12 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
     }
   }
-  if (frame->visible_rect().size() != options_.frame_size) {
+  const bool is_yuv = IsYuvPlanar(frame->format());
+  if (frame->visible_rect().size() != options_.frame_size || !is_yuv) {
     auto resized_frame = frame_pool_.CreateFrame(
-        frame->format(), options_.frame_size, gfx::Rect(options_.frame_size),
-        options_.frame_size, frame->timestamp());
+        is_yuv ? frame->format() : PIXEL_FORMAT_I420, options_.frame_size,
+        gfx::Rect(options_.frame_size), options_.frame_size,
+        frame->timestamp());
     if (resized_frame) {
       status = ConvertAndScaleFrame(*frame, *resized_frame, resize_buf_);
     } else {
......
@@ -14,6 +14,12 @@ enum VideoPixelFormat {
   // 32bpp RGBA (byte-order), 1 plane.
   "ABGR",
+  // 32bpp BGRA (byte-order), 1 plane.
+  "ARGB",
+  // 24bpp RGBX (byte-order), 1 plane.
+  "XBGR",
+  // 24bpp BGRX (byte-order), 1 plane.
+  "XRGB",
 };