Commit 0774d32e authored by Zhaoliang Ma, committed by Commit Bot

webcodecs: Optimize ImageBitmap to Frame with Skia's readbackYUV API

The code previously copied the SkImage data out and used libyuv to do
the RGB->YUV conversion, which caused unnecessary memory copies. This
CL uses Skia's YUV420 readback API instead of doing an RGB readback
followed by a separate color space conversion.
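
In essence, the CL switches from a CPU copy-and-convert to Skia's GPU
YUV420 readback. A simplified sketch of the two paths (illustrative
only; ReadbackYUV420ForFrame is a placeholder name distilled from the
diff below, not actual Chromium code):

  // Old path: copy the bitmap's RGBA pixels out, then convert on the CPU:
  //   WTF::Vector<uint8_t> pixel_data = source->CopyBitmapData();
  //   libyuv::ARGBToI420(pixel_data.data(), width * 4,
  //                      y, y_stride, u, u_stride, v, v_stride,
  //                      width, height);
  //
  // New path: Skia converts to YUV420 and hands the three planes to a
  // callback; a blocking flush makes the callback run before returning.
  // (Requires SkImage.h and GrDirectContext.h.)
  void ReadbackYUV420ForFrame(sk_sp<SkImage> image,
                              sk_sp<SkColorSpace> dst_color_space,
                              GrDirectContext* gr_context,
                              SkImage::ReadPixelsCallback callback,
                              void* context) {
    SkIRect src_rect = SkIRect::MakeWH(image->width(), image->height());
    image->asyncRescaleAndReadPixelsYUV420(
        kRec709_SkYUVColorSpace, dst_color_space, src_rect,
        {image->width(), image->height()}, SkImage::RescaleGamma::kSrc,
        kHigh_SkFilterQuality, callback, context);
    gr_context->flushAndSubmit(/*syncCpu=*/true);
  }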

This is verified with a demo that runs a 720p/30fps
UserMedia->ImageBitmap->VideoFrame->encode->decode->render pipeline.
According to chrome://tracing, the average CPU time of
VideoFrame::Create is reduced from 6.462 ms to 2.24 ms, package power
consumption is reduced from 8.03 W to 6.02 W (about 25%), and IA memory
bandwidth is reduced by about 29%.

Bug: 1102290
Test: Manually tested on atlas
Change-Id: I709bef072102e9fe23e16d69a370641b969e47e1
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2494900
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Commit-Queue: Zhaoliang Ma <zhaoliang.ma@intel.com>
Cr-Commit-Position: refs/heads/master@{#825244}
parent 59575ceb
@@ -27,6 +27,7 @@
 #include "third_party/blink/renderer/platform/graphics/skia/skia_utils.h"
 #include "third_party/blink/renderer/platform/graphics/unaccelerated_static_bitmap_image.h"
 #include "third_party/libyuv/include/libyuv.h"
+#include "third_party/skia/include/gpu/GrDirectContext.h"
 
 namespace blink {
@@ -63,6 +64,36 @@ bool IsValidSkColorType(SkColorType sk_color_type) {
   return false;
 }
 
+void OnYUVReadbackDone(
+    void* raw_frame_ptr,
+    std::unique_ptr<const SkImage::AsyncReadResult> async_result) {
+  scoped_refptr<media::VideoFrame> frame(
+      static_cast<media::VideoFrame*>(raw_frame_ptr));
+  if (!async_result) {
+    LOG(ERROR) << "Failed to read yuv420 back!";
+    return;
+  }
+  auto* data0 = static_cast<const uint8_t*>(async_result->data(0));
+  DCHECK(data0);
+  auto* data1 = static_cast<const uint8_t*>(async_result->data(1));
+  DCHECK(data1);
+  auto* data2 = static_cast<const uint8_t*>(async_result->data(2));
+  DCHECK(data2);
+  gfx::Size size = frame->coded_size();
+  libyuv::CopyPlane(data0, static_cast<int>(async_result->rowBytes(0)),
+                    frame->visible_data(media::VideoFrame::kYPlane),
+                    frame->stride(media::VideoFrame::kYPlane), size.width(),
+                    size.height());
+  libyuv::CopyPlane(data1, static_cast<int>(async_result->rowBytes(1)),
+                    frame->visible_data(media::VideoFrame::kUPlane),
+                    frame->stride(media::VideoFrame::kUPlane), size.width() / 2,
+                    size.height() / 2);
+  libyuv::CopyPlane(data2, static_cast<int>(async_result->rowBytes(2)),
+                    frame->visible_data(media::VideoFrame::kVPlane),
+                    frame->stride(media::VideoFrame::kVPlane), size.width() / 2,
+                    size.height() / 2);
+}
+
 }  // namespace
 
 VideoFrame::VideoFrame(scoped_refptr<media::VideoFrame> frame)
@@ -107,22 +138,6 @@ VideoFrame* VideoFrame::Create(ImageBitmap* source,
                                       "Invalid color space");
     return nullptr;
   }
-  auto sk_color_type = sk_image_info.colorType();
-  if (!IsValidSkColorType(sk_color_type)) {
-    exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
-                                      "Invalid pixel format");
-    return nullptr;
-  }
-
-  // TODO(jie.a.chen@intel.com): Handle data of float type.
-  // Full copy #1
-  WTF::Vector<uint8_t> pixel_data = source->CopyBitmapData();
-  if (pixel_data.size() <
-      media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_ARGB, size)) {
-    exception_state.ThrowDOMException(DOMExceptionCode::kBufferOverrunError,
-                                      "Image buffer is too small.");
-    return nullptr;
-  }
 
   auto frame = media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, size,
                                               rect, size, timestamp);
@@ -132,41 +147,72 @@ VideoFrame* VideoFrame::Create(ImageBitmap* source,
     return nullptr;
   }
 
-#if SK_PMCOLOR_BYTE_ORDER(B, G, R, A)
-  auto libyuv_convert_to_i420 = libyuv::ARGBToI420;
-#else
-  auto libyuv_convert_to_i420 = libyuv::ABGRToI420;
-#endif
-  if (sk_color_type != kN32_SkColorType) {
-    // Swap ARGB and ABGR if not using the native pixel format.
-    libyuv_convert_to_i420 =
-        (libyuv_convert_to_i420 == libyuv::ARGBToI420 ? libyuv::ABGRToI420
-                                                      : libyuv::ARGBToI420);
-  }
-
-  // TODO(jie.a.chen@intel.com): Use GPU to do the conversion.
-  // Full copy #2
-  int error =
-      libyuv_convert_to_i420(pixel_data.data(), source->width() * 4,
-                             frame->visible_data(media::VideoFrame::kYPlane),
-                             frame->stride(media::VideoFrame::kYPlane),
-                             frame->visible_data(media::VideoFrame::kUPlane),
-                             frame->stride(media::VideoFrame::kUPlane),
-                             frame->visible_data(media::VideoFrame::kVPlane),
-                             frame->stride(media::VideoFrame::kVPlane),
-                             source->width(), source->height());
-  if (error) {
-    exception_state.ThrowDOMException(DOMExceptionCode::kOperationError,
-                                      "ARGB to YUV420 conversion error");
-    return nullptr;
-  }
-  gfx::ColorSpace gfx_color_space(*sk_color_space);
-  // 'libyuv_convert_to_i420' assumes SMPTE170M.
-  // Refer to the func below to check the actual conversion:
-  // third_party/libyuv/source/row_common.cc -- RGBToY(...)
-  gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
-      gfx::ColorSpace::MatrixID::SMPTE170M, gfx::ColorSpace::RangeID::LIMITED);
-  frame->set_color_space(gfx_color_space);
+  bool is_texture =
+      source->BitmapImage()->PaintImageForCurrentFrame().IsTextureBacked();
+  // Now only SkImage_Gpu implemented the readbackYUV420 method, so for
+  // non-texture image, still use libyuv do the csc until SkImage_Base
+  // implement asyncRescaleAndReadPixelsYUV420.
+  if (is_texture) {
+    auto sk_image =
+        source->BitmapImage()->PaintImageForCurrentFrame().GetSkImage();
+    SkIRect src_rect = SkIRect::MakeWH(source->width(), source->height());
+    sk_image->asyncRescaleAndReadPixelsYUV420(
+        kRec709_SkYUVColorSpace, sk_color_space, src_rect,
+        {source->width(), source->height()}, SkImage::RescaleGamma::kSrc,
+        kHigh_SkFilterQuality, &OnYUVReadbackDone, frame.get());
+    GrDirectContext* gr_context =
+        source->BitmapImage()->ContextProvider()->GetGrContext();
+    DCHECK(gr_context);
+    gr_context->flushAndSubmit(/*syncCpu=*/true);
+  } else {
+    auto sk_color_type = sk_image_info.colorType();
+    if (!IsValidSkColorType(sk_color_type)) {
+      exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
+                                        "Invalid pixel format");
+      return nullptr;
+    }
+
+    // TODO(jie.a.chen@intel.com): Handle data of float type.
+    // Full copy #1
+    WTF::Vector<uint8_t> pixel_data = source->CopyBitmapData();
+    if (pixel_data.size() <
+        media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_ARGB, size)) {
+      exception_state.ThrowDOMException(DOMExceptionCode::kBufferOverrunError,
+                                        "Image buffer is too small.");
+      return nullptr;
+    }
+
+    DCHECK(sk_color_type == kRGBA_8888_SkColorType ||
+           sk_color_type == kBGRA_8888_SkColorType);
+    auto libyuv_convert_to_i420 = (sk_color_type == kRGBA_8888_SkColorType)
+                                      ? libyuv::ABGRToI420
+                                      : libyuv::ARGBToI420;
+
+    // TODO(jie.a.chen@intel.com): Use GPU to do the conversion.
+    // Full copy #2
+    int error =
+        libyuv_convert_to_i420(pixel_data.data(), source->width() * 4,
+                               frame->visible_data(media::VideoFrame::kYPlane),
+                               frame->stride(media::VideoFrame::kYPlane),
+                               frame->visible_data(media::VideoFrame::kUPlane),
+                               frame->stride(media::VideoFrame::kUPlane),
+                               frame->visible_data(media::VideoFrame::kVPlane),
+                               frame->stride(media::VideoFrame::kVPlane),
+                               source->width(), source->height());
+    if (error) {
+      exception_state.ThrowDOMException(DOMExceptionCode::kOperationError,
+                                        "ARGB to YUV420 conversion error");
+      return nullptr;
+    }
+    gfx::ColorSpace gfx_color_space(*sk_color_space);
+    // 'libyuv_convert_to_i420' assumes SMPTE170M.
+    // Refer to the func below to check the actual conversion:
+    // third_party/libyuv/source/row_common.cc -- RGBToY(...)
+    gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
+        gfx::ColorSpace::MatrixID::SMPTE170M,
+        gfx::ColorSpace::RangeID::LIMITED);
+    frame->set_color_space(gfx_color_space);
+  }
 
   auto* result = MakeGarbageCollected<VideoFrame>(std::move(frame));
   return result;
 }