Commit cf38f978 authored by Dale Curtis, committed by Chromium LUCI CQ

Implement WebCodecs support for ARGB, ABGR, xRGB, and xBGR.

This allows the zero-copy construction of RGB frames from CPU-backed
ImageBitmaps and removes a copy from YUV-backed ImageBitmaps. As the
formats are now supported, planar access is enabled.

Encoding support is enabled by teaching the utility method used by
the encoders how to scale and convert RGB frames to I420 and NV12.

Fixed: 1161012
Test: https://storage.googleapis.com/dalecurtis/canvas-test.html
Change-Id: I3944a4903b8defc5592ba65d4536ec4d0750c861
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2601678
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Auto-Submit: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#840760}
parent e8100cd3
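Before the diff, a minimal sketch of the zero-copy construction the message describes, assuming caller-owned 32bpp RGBA memory. WrapRgbaPixels and its buffer are hypothetical names; media::VideoFrame::WrapExternalData and AddDestructionObserver are the real APIs used by this CL.

#include <memory>
#include "base/bind.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/rect.h"

// Hypothetical helper: wrap caller-owned RGBA pixels into a VideoFrame with
// no copy. The destruction observer keeps the buffer alive for as long as
// the frame references it.
scoped_refptr<media::VideoFrame> WrapRgbaPixels(
    std::unique_ptr<uint8_t[]> pixels,
    const gfx::Size& size,
    base::TimeDelta timestamp) {
  const size_t data_size =
      static_cast<size_t>(size.width()) * size.height() * 4;
  auto frame = media::VideoFrame::WrapExternalData(
      media::PIXEL_FORMAT_ABGR, size, gfx::Rect(size), size, pixels.get(),
      data_size, timestamp);
  if (!frame)
    return nullptr;
  frame->AddDestructionObserver(base::BindOnce(
      [](std::unique_ptr<uint8_t[]>) {}, std::move(pixels)));
  return frame;
}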
......@@ -507,6 +507,71 @@ Status ConvertAndScaleFrame(const VideoFrame& src_frame,
if (!src_frame.IsMappable() || !dst_frame.IsMappable())
return Status(StatusCode::kUnsupportedFrameFormatError);
if ((dst_frame.format() == PIXEL_FORMAT_I420 ||
dst_frame.format() == PIXEL_FORMAT_NV12) &&
(src_frame.format() == PIXEL_FORMAT_XBGR ||
src_frame.format() == PIXEL_FORMAT_XRGB ||
src_frame.format() == PIXEL_FORMAT_ABGR ||
src_frame.format() == PIXEL_FORMAT_ARGB)) {
// libyuv's RGB to YUV methods always output BT.601.
dst_frame.set_color_space(gfx::ColorSpace::CreateREC601());
size_t src_stride = src_frame.stride(VideoFrame::kARGBPlane);
const uint8_t* src_data = src_frame.visible_data(VideoFrame::kARGBPlane);
if (src_frame.visible_rect() != dst_frame.visible_rect()) {
size_t tmp_buffer_size = VideoFrame::AllocationSize(
src_frame.format(), dst_frame.coded_size());
if (tmp_buf.size() < tmp_buffer_size)
tmp_buf.resize(tmp_buffer_size);
size_t stride =
VideoFrame::RowBytes(VideoFrame::kARGBPlane, src_frame.format(),
dst_frame.visible_rect().width());
int error = libyuv::ARGBScale(
src_data, src_stride, src_frame.visible_rect().width(),
src_frame.visible_rect().height(), tmp_buf.data(), stride,
dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
kDefaultFiltering);
if (error)
return Status(StatusCode::kInvalidArgument);
src_data = tmp_buf.data();
src_stride = stride;
}
if (dst_frame.format() == PIXEL_FORMAT_I420) {
auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
src_frame.format() == PIXEL_FORMAT_ABGR)
? libyuv::ABGRToI420
: libyuv::ARGBToI420;
int error = convert_fn(src_data, src_stride,
dst_frame.visible_data(media::VideoFrame::kYPlane),
dst_frame.stride(media::VideoFrame::kYPlane),
dst_frame.visible_data(media::VideoFrame::kUPlane),
dst_frame.stride(media::VideoFrame::kUPlane),
dst_frame.visible_data(media::VideoFrame::kVPlane),
dst_frame.stride(media::VideoFrame::kVPlane),
dst_frame.visible_rect().width(),
dst_frame.visible_rect().height());
return error ? Status(StatusCode::kInvalidArgument) : Status();
}
auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
src_frame.format() == PIXEL_FORMAT_ABGR)
? libyuv::ABGRToNV12
: libyuv::ARGBToNV12;
int error = convert_fn(src_data, src_stride,
dst_frame.visible_data(media::VideoFrame::kYPlane),
dst_frame.stride(media::VideoFrame::kYPlane),
dst_frame.visible_data(media::VideoFrame::kUVPlane),
dst_frame.stride(media::VideoFrame::kUVPlane),
dst_frame.visible_rect().width(),
dst_frame.visible_rect().height());
return error ? Status(StatusCode::kInvalidArgument) : Status();
}
// Converting between YUV formats doesn't change the color space.
dst_frame.set_color_space(src_frame.ColorSpace());
// Both frames are I420, only scaling is required.
if (dst_frame.format() == PIXEL_FORMAT_I420 &&
src_frame.format() == PIXEL_FORMAT_I420) {
......
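The branch above scales in RGB space first, then converts once to the destination format. The same flow as a standalone sketch, using only libyuv entry points that appear in the diff (the function name and the kFilterBox choice are illustrative); like the code above, it produces BT.601 limited-range YUV:

#include <vector>
#include "third_party/libyuv/include/libyuv.h"

// Sketch: scale 32bpp ARGB to the target size, then convert to I420.
// libyuv's RGB-to-YUV routines always output BT.601, which is why the code
// above tags the destination with gfx::ColorSpace::CreateREC601().
bool ArgbScaleToI420(const uint8_t* src, int src_stride, int src_w, int src_h,
                     uint8_t* y, int y_stride, uint8_t* u, int u_stride,
                     uint8_t* v, int v_stride, int dst_w, int dst_h) {
  std::vector<uint8_t> tmp(static_cast<size_t>(dst_w) * dst_h * 4);
  if (libyuv::ARGBScale(src, src_stride, src_w, src_h, tmp.data(), dst_w * 4,
                        dst_w, dst_h, libyuv::kFilterBox) != 0) {
    return false;
  }
  return libyuv::ARGBToI420(tmp.data(), dst_w * 4, y, y_stride, u, u_stride,
                            v, v_stride, dst_w, dst_h) == 0;
}

As in the diff, the scale step can be skipped entirely when the source and destination visible rects already match.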
......@@ -386,7 +386,8 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
uint8_t* pixels = static_cast<uint8_t*>(rgb_pixels) +
row_bytes * chunk_start * rows_per_chunk;
if (format == PIXEL_FORMAT_ARGB) {
if (format == PIXEL_FORMAT_ARGB || format == PIXEL_FORMAT_XRGB ||
format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_XBGR) {
DCHECK_LE(width, static_cast<int>(row_bytes));
const uint8_t* data = plane_meta[VideoFrame::kARGBPlane].data;
for (size_t i = 0; i < rows; i++) {
......@@ -928,8 +929,11 @@ void PaintCanvasVideoRenderer::Paint(
// frame has an unexpected format.
if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
!(media::IsYuvPlanar(video_frame->format()) ||
video_frame->format() == media::PIXEL_FORMAT_Y16 ||
video_frame->format() == media::PIXEL_FORMAT_ARGB ||
video_frame->format() == PIXEL_FORMAT_Y16 ||
video_frame->format() == PIXEL_FORMAT_ARGB ||
video_frame->format() == PIXEL_FORMAT_XRGB ||
video_frame->format() == PIXEL_FORMAT_ABGR ||
video_frame->format() == PIXEL_FORMAT_XBGR ||
video_frame->HasTextures())) {
cc::PaintFlags black_with_alpha_flags;
black_with_alpha_flags.setAlpha(flags.getAlpha());
......
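This hunk widens the fast path in ConvertVideoFrameToRGBPixelsTask from ARGB alone to all four single-plane RGB formats. The row loop it feeds is essentially a stride-aware copy; a minimal sketch (the real loop also splits the work into chunks across threads):

#include <cstring>

// Sketch: copy a tightly packed 32bpp destination from a strided source.
// A single memcpy is not enough because src_stride may include padding
// beyond width * 4 bytes.
void CopyPackedRgbRows(const uint8_t* src, size_t src_stride, uint8_t* dst,
                       size_t row_bytes, size_t rows) {
  for (size_t i = 0; i < rows; ++i) {
    std::memcpy(dst, src, row_bytes);  // row_bytes == width * 4
    src += src_stride;
    dst += row_bytes;
  }
}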
......@@ -153,8 +153,12 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
const bool supported_format = (frame->format() == PIXEL_FORMAT_NV12) ||
(frame->format() == PIXEL_FORMAT_I420);
const bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
frame->format() == PIXEL_FORMAT_I420 ||
frame->format() == PIXEL_FORMAT_XBGR ||
frame->format() == PIXEL_FORMAT_XRGB ||
frame->format() == PIXEL_FORMAT_ABGR ||
frame->format() == PIXEL_FORMAT_ARGB;
if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
!supported_format) {
status =
......@@ -175,7 +179,7 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
}
}
if (frame->format() == PIXEL_FORMAT_NV12) {
if (frame->format() != PIXEL_FORMAT_I420) {
// OpenH264 can resize frames automatically, but since we're converting the
// pixel format anyway, we can resize at the same time.
auto i420_frame = frame_pool_.CreateFrame(
......
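The supported-format gate added here for OpenH264 reappears verbatim in VpxVideoEncoder::Encode further down. Expressed as a hypothetical helper (the CL inlines the check in each encoder):

bool IsSupportedEncodeFormat(media::VideoPixelFormat format) {
  switch (format) {
    case media::PIXEL_FORMAT_I420:  // Consumed directly.
    case media::PIXEL_FORMAT_NV12:  // Converted to I420 for OpenH264.
    case media::PIXEL_FORMAT_XBGR:  // RGB variants are converted to I420.
    case media::PIXEL_FORMAT_XRGB:
    case media::PIXEL_FORMAT_ABGR:
    case media::PIXEL_FORMAT_ARGB:
      return true;
    default:
      return false;
  }
}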
......@@ -41,9 +41,20 @@ VideoEncodeAccelerator::Config SetUpVeaConfig(
opts.bitrate.value_or(opts.frame_size.width() * opts.frame_size.height() *
kVEADefaultBitratePerPixel));
const bool is_rgb =
format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
// Override the provided format if incoming frames are RGB -- they'll be
// converted to I420 or NV12 depending on the VEA configuration.
if (is_rgb)
config.input_format = PIXEL_FORMAT_I420;
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
if (storage_type == VideoFrame::STORAGE_DMABUFS ||
storage_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
if (is_rgb)
config.input_format = PIXEL_FORMAT_NV12;
config.storage_type = VideoEncodeAccelerator::Config::StorageType::kDmabuf;
}
#endif
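A condensed sketch of the override above (ChooseVeaInputFormat is a hypothetical name): RGB never reaches the accelerator as RGB; it becomes I420 in shared memory, or NV12 when frames travel as dmabufs on Linux/ChromeOS:

media::VideoPixelFormat ChooseVeaInputFormat(media::VideoPixelFormat format,
                                             bool use_dmabuf_path) {
  const bool is_rgb = format == media::PIXEL_FORMAT_XBGR ||
                      format == media::PIXEL_FORMAT_XRGB ||
                      format == media::PIXEL_FORMAT_ABGR ||
                      format == media::PIXEL_FORMAT_ARGB;
  if (!is_rgb)
    return format;  // I420 and NV12 pass through unchanged.
  return use_dmabuf_path ? media::PIXEL_FORMAT_NV12
                         : media::PIXEL_FORMAT_I420;
}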
......@@ -223,9 +234,13 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
// We use the first frame to set up the VEA config so that we can ensure that
// zero-copy hardware encoding from the camera can be used.
const auto& first_frame = pending_encodes_.front()->frame;
auto format = first_frame->format();
if (format != PIXEL_FORMAT_I420 && format != PIXEL_FORMAT_NV12) {
const auto format = first_frame->format();
const bool is_rgb =
format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
const bool supported_format =
format == PIXEL_FORMAT_NV12 || format == PIXEL_FORMAT_I420 || is_rgb;
if (!supported_format) {
auto status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("frame", first_frame->AsHumanReadableString());
......@@ -259,7 +274,7 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
}
state_ = State::kInitializing;
format_ = format;
format_ = vea_config.input_format;
}
void VideoEncodeAcceleratorAdapter::Encode(scoped_refptr<VideoFrame> frame,
......@@ -610,8 +625,8 @@ T VideoEncodeAcceleratorAdapter::WrapCallback(T cb) {
return BindToLoop(callback_task_runner_.get(), std::move(cb));
}
// Copy a frame into a shared mem buffer and resize it as the same time.
// Input frames can I420 or NV12, they'll be converted to I420 if needed.
// Copy a frame into a shared mem buffer and resize it at the same time. Input
// frames can be I420, NV12, or RGB -- they'll be converted to I420 if needed.
StatusOr<scoped_refptr<VideoFrame>>
VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
const gfx::Size& size,
......@@ -646,8 +661,8 @@ VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
return shared_frame;
}
// Copy a frame into a GPU buffer and resize it as the same time.
// Input frames can I420 or NV12, they'll be converted to NV12 if needed.
// Copy a frame into a GPU buffer and resize it at the same time. Input frames
// can be I420, NV12, or RGB -- they'll be converted to NV12 if needed.
StatusOr<scoped_refptr<VideoFrame>>
VideoEncodeAcceleratorAdapter::PrepareGpuFrame(
const gfx::Size& size,
......
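Both preparation helpers funnel RGB input through the ConvertAndScaleFrame utility from the first hunk. A usage sketch, assuming the (src, dst, scratch) signature shown there and media::Status::is_ok():

// Sketch: convert an RGB source frame to I420 at the encoder's frame size.
scoped_refptr<media::VideoFrame> dst = media::VideoFrame::CreateFrame(
    media::PIXEL_FORMAT_I420, frame_size, gfx::Rect(frame_size), frame_size,
    src_frame->timestamp());
std::vector<uint8_t> scratch;
media::Status status = ConvertAndScaleFrame(*src_frame, *dst, scratch);
if (!status.is_ok()) {
  // Fall back or report the conversion error.
}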
......@@ -112,6 +112,23 @@ class VideoEncodeAcceleratorAdapterTest
return frame;
}
scoped_refptr<VideoFrame> CreateGreenCpuFrameARGB(gfx::Size size,
base::TimeDelta timestamp) {
auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_XRGB, size,
gfx::Rect(size), size, timestamp);
// Green XRGB frame (R:0x3B, G:0xD9, B:0x24)
libyuv::ARGBRect(frame->data(VideoFrame::kARGBPlane),
frame->stride(VideoFrame::kARGBPlane),
0, // left
0, // top
frame->visible_rect().width(), // width
frame->visible_rect().height(), // height
0x24D93B00); // Packed color value.
return frame;
}
scoped_refptr<VideoFrame> CreateGreenFrame(gfx::Size size,
VideoPixelFormat format,
base::TimeDelta timestamp) {
......@@ -120,6 +137,8 @@ class VideoEncodeAcceleratorAdapterTest
return CreateGreenCpuFrame(size, timestamp);
case PIXEL_FORMAT_NV12:
return CreateGreenGpuFrame(size, timestamp);
case PIXEL_FORMAT_XRGB:
return CreateGreenCpuFrameARGB(size, timestamp);
default:
EXPECT_TRUE(false) << "unsupported pixel format";
return nullptr;
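A note on the ARGBRect call in CreateGreenCpuFrameARGB above: libyuv::ARGBRect takes an (x, y, width, height) rectangle plus a 32-bit value written verbatim into each pixel, so on little-endian hardware a word packed as 0xAARRGGBB lands in memory as B, G, R, A (libyuv "ARGB" / media PIXEL_FORMAT_ARGB layout). A sketch of packing such a value (PackArgb is illustrative; no claim is made about how the test's exact constant was derived):

constexpr uint32_t PackArgb(uint8_t a, uint8_t r, uint8_t g, uint8_t b) {
  return (static_cast<uint32_t>(a) << 24) | (static_cast<uint32_t>(r) << 16) |
         (static_cast<uint32_t>(g) << 8) | static_cast<uint32_t>(b);
}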
......@@ -237,7 +256,8 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, TwoFramesResize) {
vea()->SetEncodingCallback(base::BindLambdaForTesting(
[&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
EXPECT_EQ(frame->format(), pixel_format);
EXPECT_EQ(frame->format(),
IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
#else
// Everywhere except Linux, resizing switches the frame into CPU mode.
EXPECT_EQ(frame->format(), PIXEL_FORMAT_I420);
......@@ -280,7 +300,8 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
vea()->SetEncodingCallback(base::BindLambdaForTesting(
[&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
EXPECT_EQ(frame->format(), pixel_format);
EXPECT_EQ(frame->format(),
IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
EXPECT_EQ(frame->coded_size(), options.frame_size);
return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
}));
......@@ -295,13 +316,17 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
size = small_size;
else
size = same_size;
auto create_func =
(frame_index & 4)
? &VideoEncodeAcceleratorAdapterTest::CreateGreenGpuFrame
: &VideoEncodeAcceleratorAdapterTest::CreateGreenCpuFrame;
// Every 4 frames, switch among the 3 supported formats.
const int rem = frame_index % 12;
auto format = PIXEL_FORMAT_XRGB;
if (rem < 4)
format = PIXEL_FORMAT_I420;
else if (rem < 8)
format = PIXEL_FORMAT_NV12;
bool key = frame_index % 9 == 0;
auto frame = (this->*create_func)(
size, base::TimeDelta::FromMilliseconds(frame_index));
auto frame = CreateGreenFrame(
size, format, base::TimeDelta::FromMilliseconds(frame_index));
adapter()->Encode(frame, key, ValidatingStatusCB());
}
......@@ -312,6 +337,7 @@ TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
INSTANTIATE_TEST_SUITE_P(VideoEncodeAcceleratorAdapterTest,
VideoEncodeAcceleratorAdapterTest,
::testing::Values(PIXEL_FORMAT_I420,
PIXEL_FORMAT_NV12));
PIXEL_FORMAT_NV12,
PIXEL_FORMAT_XRGB));
} // namespace media
......@@ -251,8 +251,12 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
bool supported_format = (frame->format() == PIXEL_FORMAT_NV12) ||
(frame->format() == PIXEL_FORMAT_I420);
bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
frame->format() == PIXEL_FORMAT_I420 ||
frame->format() == PIXEL_FORMAT_XBGR ||
frame->format() == PIXEL_FORMAT_XRGB ||
frame->format() == PIXEL_FORMAT_ABGR ||
frame->format() == PIXEL_FORMAT_ARGB;
if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
!supported_format) {
status =
......@@ -273,10 +277,12 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
}
}
if (frame->visible_rect().size() != options_.frame_size) {
const bool is_yuv = IsYuvPlanar(frame->format());
if (frame->visible_rect().size() != options_.frame_size || !is_yuv) {
auto resized_frame = frame_pool_.CreateFrame(
frame->format(), options_.frame_size, gfx::Rect(options_.frame_size),
options_.frame_size, frame->timestamp());
is_yuv ? frame->format() : PIXEL_FORMAT_I420, options_.frame_size,
gfx::Rect(options_.frame_size), options_.frame_size,
frame->timestamp());
if (resized_frame) {
status = ConvertAndScaleFrame(*frame, *resized_frame, resize_buf_);
} else {
......
......@@ -28,6 +28,7 @@
#include "third_party/blink/renderer/platform/graphics/image.h"
#include "third_party/blink/renderer/platform/graphics/skia/skia_utils.h"
#include "third_party/blink/renderer/platform/graphics/unaccelerated_static_bitmap_image.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
......@@ -66,34 +67,38 @@ bool IsValidSkColorType(SkColorType sk_color_type) {
return false;
}
struct YUVReadbackContext {
gfx::Size coded_size;
gfx::Rect visible_rect;
gfx::Size natural_size;
base::TimeDelta timestamp;
scoped_refptr<media::VideoFrame> frame;
};
void OnYUVReadbackDone(
void* raw_frame_ptr,
void* raw_ctx,
std::unique_ptr<const SkImage::AsyncReadResult> async_result) {
scoped_refptr<media::VideoFrame> frame(
static_cast<media::VideoFrame*>(raw_frame_ptr));
if (!async_result) {
LOG(ERROR) << "Failed to read yuv420 back!";
if (!async_result)
return;
}
auto* data0 = static_cast<const uint8_t*>(async_result->data(0));
DCHECK(data0);
auto* data1 = static_cast<const uint8_t*>(async_result->data(1));
DCHECK(data1);
auto* data2 = static_cast<const uint8_t*>(async_result->data(2));
DCHECK(data2);
gfx::Size size = frame->coded_size();
libyuv::CopyPlane(data0, static_cast<int>(async_result->rowBytes(0)),
frame->visible_data(media::VideoFrame::kYPlane),
frame->stride(media::VideoFrame::kYPlane), size.width(),
size.height());
libyuv::CopyPlane(data1, static_cast<int>(async_result->rowBytes(1)),
frame->visible_data(media::VideoFrame::kUPlane),
frame->stride(media::VideoFrame::kUPlane), size.width() / 2,
size.height() / 2);
libyuv::CopyPlane(data2, static_cast<int>(async_result->rowBytes(2)),
frame->visible_data(media::VideoFrame::kVPlane),
frame->stride(media::VideoFrame::kVPlane), size.width() / 2,
size.height() / 2);
auto* context = reinterpret_cast<YUVReadbackContext*>(raw_ctx);
context->frame = media::VideoFrame::WrapExternalYuvData(
media::PIXEL_FORMAT_I420, context->coded_size, context->visible_rect,
context->natural_size, static_cast<int>(async_result->rowBytes(0)),
static_cast<int>(async_result->rowBytes(1)),
static_cast<int>(async_result->rowBytes(2)),
// TODO(crbug.com/1161304): We should be able to wrap readonly memory in
// a VideoFrame without resorting to a const_cast.
reinterpret_cast<uint8_t*>(const_cast<void*>(async_result->data(0))),
reinterpret_cast<uint8_t*>(const_cast<void*>(async_result->data(1))),
reinterpret_cast<uint8_t*>(const_cast<void*>(async_result->data(2))),
context->timestamp);
if (!context->frame)
return;
context->frame->AddDestructionObserver(
ConvertToBaseOnceCallback(WTF::CrossThreadBindOnce(
base::DoNothing::Once<
std::unique_ptr<const SkImage::AsyncReadResult>>(),
std::move(async_result))));
}
} // namespace
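The rewritten readback path above replaces three CopyPlane calls with WrapExternalYuvData, so the frame's planes point directly into Skia's AsyncReadResult. That makes the destruction observer load-bearing: the result must outlive the frame. A simplified sketch of the lifetime pattern, using base::BindOnce (the CL uses CrossThreadBindOnce because the frame may be destroyed on another thread):

// Sketch: keep the readback result alive for as long as the wrapping frame.
void KeepReadResultAlive(
    media::VideoFrame* frame,
    std::unique_ptr<const SkImage::AsyncReadResult> result) {
  frame->AddDestructionObserver(base::BindOnce(
      [](std::unique_ptr<const SkImage::AsyncReadResult>) {},
      std::move(result)));
}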
......@@ -127,49 +132,59 @@ VideoFrame* VideoFrame::Create(ScriptState* script_state,
return nullptr;
}
gfx::Size size(source->width(), source->height());
gfx::Rect rect(size);
base::TimeDelta timestamp =
base::TimeDelta::FromMicroseconds(init->timestamp());
const gfx::Size coded_size(source->width(), source->height());
const gfx::Rect visible_rect(coded_size);
const gfx::Size natural_size = coded_size;
const auto timestamp = base::TimeDelta::FromMicroseconds(init->timestamp());
const auto& paint_image = source->BitmapImage()->PaintImageForCurrentFrame();
const auto sk_image_info = paint_image.GetSkImageInfo();
auto sk_image_info =
source->BitmapImage()->PaintImageForCurrentFrame().GetSkImageInfo();
auto sk_color_space = sk_image_info.refColorSpace();
if (!sk_color_space) {
if (!sk_color_space)
sk_color_space = SkColorSpace::MakeSRGB();
}
if (!IsValidSkColorSpace(sk_color_space)) {
exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
"Invalid color space");
return nullptr;
}
auto frame = media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, size,
rect, size, timestamp);
if (!frame) {
exception_state.ThrowDOMException(DOMExceptionCode::kNotSupportedError,
"Frame creation failed");
return nullptr;
}
const bool is_texture = paint_image.IsTextureBacked();
const auto sk_image = paint_image.GetSkImage();
scoped_refptr<media::VideoFrame> frame;
bool is_texture =
source->BitmapImage()->PaintImageForCurrentFrame().IsTextureBacked();
// Currently only SkImage_Gpu implements the readbackYUV420 method, so for a
// non-texture image we still use libyuv to do the color space conversion,
// until SkImage_Base implements asyncRescaleAndReadPixelsYUV420.
if (is_texture) {
auto sk_image =
source->BitmapImage()->PaintImageForCurrentFrame().GetSkImage();
SkIRect src_rect = SkIRect::MakeWH(source->width(), source->height());
YUVReadbackContext result;
result.coded_size = coded_size;
result.visible_rect = visible_rect;
result.natural_size = natural_size;
result.timestamp = timestamp;
// While this function indicates it's asynchronous, the flushAndSubmit()
// call below ensures it completes synchronously.
const auto src_rect = SkIRect::MakeWH(source->width(), source->height());
sk_image->asyncRescaleAndReadPixelsYUV420(
kRec709_SkYUVColorSpace, sk_color_space, src_rect,
{source->width(), source->height()}, SkImage::RescaleGamma::kSrc,
kHigh_SkFilterQuality, &OnYUVReadbackDone, frame.get());
kHigh_SkFilterQuality, &OnYUVReadbackDone, &result);
GrDirectContext* gr_context =
source->BitmapImage()->ContextProvider()->GetGrContext();
DCHECK(gr_context);
gr_context->flushAndSubmit(/*syncCpu=*/true);
if (!result.frame) {
exception_state.ThrowDOMException(DOMExceptionCode::kOperationError,
"YUV conversion error during readback");
return nullptr;
}
frame = std::move(result.frame);
} else {
DCHECK(!sk_image->isTextureBacked());
auto sk_color_type = sk_image_info.colorType();
if (!IsValidSkColorType(sk_color_type)) {
exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
......@@ -177,47 +192,30 @@ VideoFrame* VideoFrame::Create(ScriptState* script_state,
return nullptr;
}
// TODO(jie.a.chen@intel.com): Handle data of float type.
// Full copy #1
WTF::Vector<uint8_t> pixel_data =
source->CopyBitmapData(source->GetBitmapSkImageInfo(), false);
if (pixel_data.size() <
media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_ARGB, size)) {
exception_state.ThrowDOMException(DOMExceptionCode::kBufferOverrunError,
"Image buffer is too small.");
return nullptr;
}
DCHECK(sk_color_type == kRGBA_8888_SkColorType ||
sk_color_type == kBGRA_8888_SkColorType);
auto libyuv_convert_to_i420 = (sk_color_type == kRGBA_8888_SkColorType)
? libyuv::ABGRToI420
: libyuv::ARGBToI420;
// TODO(jie.a.chen@intel.com): Use GPU to do the conversion.
// Full copy #2
int error =
libyuv_convert_to_i420(pixel_data.data(), source->width() * 4,
frame->visible_data(media::VideoFrame::kYPlane),
frame->stride(media::VideoFrame::kYPlane),
frame->visible_data(media::VideoFrame::kUPlane),
frame->stride(media::VideoFrame::kUPlane),
frame->visible_data(media::VideoFrame::kVPlane),
frame->stride(media::VideoFrame::kVPlane),
source->width(), source->height());
if (error) {
exception_state.ThrowDOMException(DOMExceptionCode::kOperationError,
"ARGB to YUV420 conversion error");
return nullptr;
}
gfx::ColorSpace gfx_color_space(*sk_color_space);
// 'libyuv_convert_to_i420' assumes SMPTE170M.
// Refer to the func below to check the actual conversion:
// third_party/libyuv/source/row_common.cc -- RGBToY(...)
gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
gfx::ColorSpace::MatrixID::SMPTE170M,
gfx::ColorSpace::RangeID::LIMITED);
frame->set_color_space(gfx_color_space);
SkPixmap pm;
const bool peek_result = sk_image->peekPixels(&pm);
DCHECK(peek_result);
const auto format = sk_image->isOpaque()
? (sk_color_type == kRGBA_8888_SkColorType
? media::PIXEL_FORMAT_XBGR
: media::PIXEL_FORMAT_XRGB)
: (sk_color_type == kRGBA_8888_SkColorType
? media::PIXEL_FORMAT_ABGR
: media::PIXEL_FORMAT_ARGB);
frame = media::VideoFrame::WrapExternalData(
format, coded_size, visible_rect, natural_size,
// TODO(crbug.com/1161304): We should be able to wrap readonly memory in
// a VideoFrame instead of using writable_addr() here.
reinterpret_cast<uint8_t*>(pm.writable_addr()), pm.computeByteSize(),
timestamp);
frame->set_color_space(gfx::ColorSpace(*sk_color_space));
frame->AddDestructionObserver(ConvertToBaseOnceCallback(CrossThreadBindOnce(
base::DoNothing::Once<sk_sp<SkImage>>(), std::move(sk_image))));
}
auto* result = MakeGarbageCollected<VideoFrame>(
std::move(frame), ExecutionContext::From(script_state));
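The format selection above hinges on a naming flip: Skia color types name bytes in memory order, while media pixel formats name the packed little-endian word, so Skia's kRGBA_8888 corresponds to media's ABGR. A sketch of the mapping (the helper name is illustrative; the CL writes it as a nested conditional):

media::VideoPixelFormat SkColorTypeToPixelFormat(SkColorType type,
                                                 bool is_opaque) {
  if (type == kRGBA_8888_SkColorType)  // Memory bytes R,G,B,A.
    return is_opaque ? media::PIXEL_FORMAT_XBGR : media::PIXEL_FORMAT_ABGR;
  DCHECK_EQ(type, kBGRA_8888_SkColorType);  // Memory bytes B,G,R,A.
  return is_opaque ? media::PIXEL_FORMAT_XRGB : media::PIXEL_FORMAT_ARGB;
}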
......@@ -226,14 +224,28 @@ VideoFrame* VideoFrame::Create(ScriptState* script_state,
// static
bool VideoFrame::IsSupportedPlanarFormat(media::VideoFrame* frame) {
// For now, only I420, I420A, NV12, and the single-plane 32bpp RGB formats in
// CPU or GPU memory are supported.
return frame && (frame->IsMappable() || frame->HasGpuMemoryBuffer()) &&
((frame->format() == media::PIXEL_FORMAT_I420 &&
frame->layout().num_planes() == 3) ||
(frame->format() == media::PIXEL_FORMAT_I420A &&
frame->layout().num_planes() == 4) ||
(frame->format() == media::PIXEL_FORMAT_NV12 &&
frame->layout().num_planes() == 2));
if (!frame)
return false;
if (!frame->IsMappable() && !frame->HasGpuMemoryBuffer())
return false;
const size_t num_planes = frame->layout().num_planes();
switch (frame->format()) {
case media::PIXEL_FORMAT_I420:
return num_planes == 3;
case media::PIXEL_FORMAT_I420A:
return num_planes == 4;
case media::PIXEL_FORMAT_NV12:
return num_planes == 2;
case media::PIXEL_FORMAT_XBGR:
case media::PIXEL_FORMAT_XRGB:
case media::PIXEL_FORMAT_ABGR:
case media::PIXEL_FORMAT_ARGB:
return num_planes == 1;
default:
return false;
}
}
String VideoFrame::format() const {
......@@ -247,7 +259,14 @@ String VideoFrame::format() const {
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kI420);
case media::PIXEL_FORMAT_NV12:
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kNV12);
case media::PIXEL_FORMAT_ABGR:
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kABGR);
case media::PIXEL_FORMAT_XBGR:
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kXBGR);
case media::PIXEL_FORMAT_ARGB:
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kARGB);
case media::PIXEL_FORMAT_XRGB:
return V8VideoPixelFormat(V8VideoPixelFormat::Enum::kXRGB);
default:
NOTREACHED();
return String();
......@@ -466,28 +485,32 @@ ScriptPromise VideoFrame::CreateImageBitmap(ScriptState* script_state,
}
#endif // !defined(OS_ANDROID)
const bool is_rgb = local_frame->format() == media::PIXEL_FORMAT_ARGB ||
local_frame->format() == media::PIXEL_FORMAT_XRGB ||
local_frame->format() == media::PIXEL_FORMAT_ABGR ||
local_frame->format() == media::PIXEL_FORMAT_XBGR;
if ((local_frame->IsMappable() &&
(local_frame->format() == media::PIXEL_FORMAT_I420 ||
local_frame->format() == media::PIXEL_FORMAT_I420A)) ||
(local_frame->HasTextures() &&
(local_frame->format() == media::PIXEL_FORMAT_I420 ||
local_frame->format() == media::PIXEL_FORMAT_I420A ||
local_frame->format() == media::PIXEL_FORMAT_NV12 ||
local_frame->format() == media::PIXEL_FORMAT_ABGR ||
local_frame->format() == media::PIXEL_FORMAT_XRGB))) {
local_frame->format() == media::PIXEL_FORMAT_NV12)) ||
is_rgb) {
scoped_refptr<StaticBitmapImage> image;
gfx::ColorSpace gfx_color_space = local_frame->ColorSpace();
gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
gfx::ColorSpace::MatrixID::RGB, gfx::ColorSpace::RangeID::FULL);
auto sk_color_space = gfx_color_space.ToSkColorSpace();
if (!sk_color_space) {
if (!sk_color_space)
sk_color_space = SkColorSpace::MakeSRGB();
}
const bool prefer_accelerated_image_bitmap =
local_frame->format() != media::PIXEL_FORMAT_I420A &&
(BitmapSourceSize().Area() > kCpuEfficientFrameSize ||
local_frame->HasTextures());
local_frame->HasTextures()) &&
(!is_rgb || local_frame->HasTextures());
if (!prefer_accelerated_image_bitmap) {
size_t bytes_per_row = sizeof(SkColor) * cropWidth();
......
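The reworked prefer_accelerated_image_bitmap condition above can be read as three rules; a behavior-equivalent sketch (hypothetical helper, same truth table as the diff):

bool PreferAcceleratedImageBitmap(bool has_textures, bool is_rgb,
                                  bool is_i420a, int visible_area,
                                  int cpu_efficient_threshold) {
  if (is_i420a)
    return false;  // Alpha frames always take the software path.
  if (is_rgb && !has_textures)
    return false;  // Software RGB frames can be wrapped without a GPU trip.
  return has_textures || visible_area > cpu_efficient_threshold;
}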
......@@ -14,6 +14,12 @@ enum VideoPixelFormat {
// 32bpp RGBA (byte-order), 1 plane.
"ABGR",
// 32bpp BGRA (byte-order), 1 plane.
"ARGB",
// 24bpp RGBX (byte-order), 1 plane.
"XBGR",
// 24bpp BGRX (byte-order), 1 plane.
"XRGB",
};