Commit 0774d32e authored by Zhaoliang Ma, committed by Commit Bot

webcodecs: Optimize ImageBitmap to Frame with Skia's readbackYUV API

The code previously copied the SkImage data out and used libyuv to do the
RGB->YUV conversion, which caused some unnecessary memory copies. This CL
uses Skia's YUV420 readback API instead of doing an RGB readback followed
by a color space conversion.

This is verified via a demo that does 720p/30fps UserMedia -> ImageBitmap ->
VideoFrame -> encode -> decode -> render. From chrome://tracing, the average
CPU time of VideoFrame::Create is reduced from 6.462 ms to 2.24 ms, the
package power consumption is reduced from 8.03 W to 6.02 W (about 25%), and
the IA memory bandwidth is reduced by about 29%.

Bug: 1102290
Test: Manually tested on atlas
Change-Id: I709bef072102e9fe23e16d69a370641b969e47e1
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2494900
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Commit-Queue: Zhaoliang Ma <zhaoliang.ma@intel.com>
Cr-Commit-Position: refs/heads/master@{#825244}
parent 59575ceb
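
For context, a minimal standalone sketch of the readback pattern this CL adopts on the texture-backed path: Skia's asyncRescaleAndReadPixelsYUV420 hands Y/U/V planes to a callback, and flushAndSubmit with syncCpu set to true forces that callback to run before the frame is used. The helper ReadbackToI420, the ReadbackContext struct, and the sRGB destination color space are illustrative choices, not part of the CL; the Skia signatures shown match the API at the time of this change (SkFilterQuality has since been replaced in newer Skia).

// Sketch only: reads a GPU-backed SkImage back as tightly packed I420 planes.
// NOTE: mirrors the Skia API used by this CL; not the CL's actual code.
#include <cstdint>
#include <cstring>
#include <memory>
#include <vector>

#include "third_party/skia/include/core/SkColorSpace.h"
#include "third_party/skia/include/core/SkFilterQuality.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkImageInfo.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"

struct ReadbackContext {
  int width = 0;
  int height = 0;
  std::vector<uint8_t> y, u, v;  // Tightly packed output planes.
  bool ok = false;
};

static void OnYuvDone(void* raw_ctx,
                      std::unique_ptr<const SkImage::AsyncReadResult> result) {
  auto* ctx = static_cast<ReadbackContext*>(raw_ctx);
  if (!result)
    return;  // Readback failed; ctx->ok stays false.
  const int half_w = ctx->width / 2, half_h = ctx->height / 2;  // Even dims.
  auto copy_plane = [&](int i, int pw, int ph, std::vector<uint8_t>* dst) {
    // result->data(i) is stride-padded; repack it row by row.
    dst->resize(static_cast<size_t>(pw) * ph);
    const auto* src = static_cast<const uint8_t*>(result->data(i));
    const size_t src_stride = result->rowBytes(i);
    for (int row = 0; row < ph; ++row)
      std::memcpy(dst->data() + static_cast<size_t>(row) * pw,
                  src + row * src_stride, pw);
  };
  copy_plane(0, ctx->width, ctx->height, &ctx->y);  // Y
  copy_plane(1, half_w, half_h, &ctx->u);           // U
  copy_plane(2, half_w, half_h, &ctx->v);           // V
  ctx->ok = true;
}

ReadbackContext ReadbackToI420(sk_sp<SkImage> image,
                               GrDirectContext* gr_context) {
  ReadbackContext ctx;
  ctx.width = image->width();
  ctx.height = image->height();
  image->asyncRescaleAndReadPixelsYUV420(
      kRec709_SkYUVColorSpace, SkColorSpace::MakeSRGB(),
      SkIRect::MakeWH(ctx.width, ctx.height), {ctx.width, ctx.height},
      SkImage::RescaleGamma::kSrc, kHigh_SkFilterQuality, &OnYuvDone, &ctx);
  // The "async" readback is made effectively synchronous: syncCpu == true
  // blocks until the GPU work is done and the callback has run.
  gr_context->flushAndSubmit(/*syncCpu=*/true);
  return ctx;
}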
@@ -27,6 +27,7 @@
 #include "third_party/blink/renderer/platform/graphics/skia/skia_utils.h"
 #include "third_party/blink/renderer/platform/graphics/unaccelerated_static_bitmap_image.h"
 #include "third_party/libyuv/include/libyuv.h"
+#include "third_party/skia/include/gpu/GrDirectContext.h"
 
 namespace blink {
@@ -63,6 +64,36 @@ bool IsValidSkColorType(SkColorType sk_color_type) {
   return false;
 }
 
+void OnYUVReadbackDone(
+    void* raw_frame_ptr,
+    std::unique_ptr<const SkImage::AsyncReadResult> async_result) {
+  scoped_refptr<media::VideoFrame> frame(
+      static_cast<media::VideoFrame*>(raw_frame_ptr));
+  if (!async_result) {
+    LOG(ERROR) << "Failed to read yuv420 back!";
+    return;
+  }
+  auto* data0 = static_cast<const uint8_t*>(async_result->data(0));
+  DCHECK(data0);
+  auto* data1 = static_cast<const uint8_t*>(async_result->data(1));
+  DCHECK(data1);
+  auto* data2 = static_cast<const uint8_t*>(async_result->data(2));
+  DCHECK(data2);
+  gfx::Size size = frame->coded_size();
+  libyuv::CopyPlane(data0, static_cast<int>(async_result->rowBytes(0)),
+                    frame->visible_data(media::VideoFrame::kYPlane),
+                    frame->stride(media::VideoFrame::kYPlane), size.width(),
+                    size.height());
+  libyuv::CopyPlane(data1, static_cast<int>(async_result->rowBytes(1)),
+                    frame->visible_data(media::VideoFrame::kUPlane),
+                    frame->stride(media::VideoFrame::kUPlane), size.width() / 2,
+                    size.height() / 2);
+  libyuv::CopyPlane(data2, static_cast<int>(async_result->rowBytes(2)),
+                    frame->visible_data(media::VideoFrame::kVPlane),
+                    frame->stride(media::VideoFrame::kVPlane), size.width() / 2,
+                    size.height() / 2);
+}
+
 }  // namespace
 
 VideoFrame::VideoFrame(scoped_refptr<media::VideoFrame> frame)
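
Each CopyPlane call in the callback above is a stride-aware, row-by-row copy from the readback buffer into the corresponding VideoFrame plane. A minimal standalone sketch of that behavior, with made-up buffer sizes and strides for illustration:

// Sketch only: libyuv::CopyPlane copies `width` bytes from each of `height`
// rows, honoring differing source and destination strides (row padding).
#include <cstdint>
#include <vector>

#include "third_party/libyuv/include/libyuv.h"

int main() {
  const int width = 8, height = 4;
  const int src_stride = 8;   // Tightly packed source rows.
  const int dst_stride = 16;  // Destination rows padded to 16 bytes.
  std::vector<uint8_t> src(src_stride * height, 0x80);
  std::vector<uint8_t> dst(dst_stride * height, 0x00);
  // The padding bytes beyond `width` in each destination row are untouched.
  libyuv::CopyPlane(src.data(), src_stride, dst.data(), dst_stride, width,
                    height);
  return 0;
}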
@@ -107,6 +138,33 @@ VideoFrame* VideoFrame::Create(ImageBitmap* source,
                                       "Invalid color space");
     return nullptr;
   }
+
+  auto frame = media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, size,
+                                              rect, size, timestamp);
+  if (!frame) {
+    exception_state.ThrowDOMException(DOMExceptionCode::kNotSupportedError,
+                                      "Frame creation failed");
+    return nullptr;
+  }
+
+  bool is_texture =
+      source->BitmapImage()->PaintImageForCurrentFrame().IsTextureBacked();
+  // Now only SkImage_Gpu implemented the readbackYUV420 method, so for
+  // non-texture image, still use libyuv do the csc until SkImage_Base
+  // implement asyncRescaleAndReadPixelsYUV420.
+  if (is_texture) {
+    auto sk_image =
+        source->BitmapImage()->PaintImageForCurrentFrame().GetSkImage();
+    SkIRect src_rect = SkIRect::MakeWH(source->width(), source->height());
+    sk_image->asyncRescaleAndReadPixelsYUV420(
+        kRec709_SkYUVColorSpace, sk_color_space, src_rect,
+        {source->width(), source->height()}, SkImage::RescaleGamma::kSrc,
+        kHigh_SkFilterQuality, &OnYUVReadbackDone, frame.get());
+    GrDirectContext* gr_context =
+        source->BitmapImage()->ContextProvider()->GetGrContext();
+    DCHECK(gr_context);
+    gr_context->flushAndSubmit(/*syncCpu=*/true);
+  } else {
   auto sk_color_type = sk_image_info.colorType();
   if (!IsValidSkColorType(sk_color_type)) {
     exception_state.ThrowDOMException(DOMExceptionCode::kInvalidStateError,
@@ -124,25 +182,11 @@ VideoFrame* VideoFrame::Create(ImageBitmap* source,
     return nullptr;
   }
 
-  auto frame = media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, size,
-                                              rect, size, timestamp);
-  if (!frame) {
-    exception_state.ThrowDOMException(DOMExceptionCode::kNotSupportedError,
-                                      "Frame creation failed");
-    return nullptr;
-  }
-
-#if SK_PMCOLOR_BYTE_ORDER(B, G, R, A)
-  auto libyuv_convert_to_i420 = libyuv::ARGBToI420;
-#else
-  auto libyuv_convert_to_i420 = libyuv::ABGRToI420;
-#endif
-  if (sk_color_type != kN32_SkColorType) {
-    // Swap ARGB and ABGR if not using the native pixel format.
-    libyuv_convert_to_i420 =
-        (libyuv_convert_to_i420 == libyuv::ARGBToI420 ? libyuv::ABGRToI420
-                                                      : libyuv::ARGBToI420);
-  }
+    DCHECK(sk_color_type == kRGBA_8888_SkColorType ||
+           sk_color_type == kBGRA_8888_SkColorType);
+    auto libyuv_convert_to_i420 = (sk_color_type == kRGBA_8888_SkColorType)
+                                      ? libyuv::ABGRToI420
+                                      : libyuv::ARGBToI420;
   // TODO(jie.a.chen@intel.com): Use GPU to do the conversion.
   // Full copy #2
@@ -165,8 +209,10 @@ VideoFrame* VideoFrame::Create(ImageBitmap* source,
   // Refer to the func below to check the actual conversion:
   // third_party/libyuv/source/row_common.cc -- RGBToY(...)
-  gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
-      gfx::ColorSpace::MatrixID::SMPTE170M, gfx::ColorSpace::RangeID::LIMITED);
+    gfx_color_space = gfx_color_space.GetWithMatrixAndRange(
+        gfx::ColorSpace::MatrixID::SMPTE170M,
+        gfx::ColorSpace::RangeID::LIMITED);
   frame->set_color_space(gfx_color_space);
+  }
   auto* result = MakeGarbageCollected<VideoFrame>(std::move(frame));
   return result;
 }
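
A note on the non-texture fallback path in the diff above: libyuv names pixel formats as little-endian words, so a buffer whose bytes are laid out R,G,B,A in memory (Skia's kRGBA_8888_SkColorType) is libyuv's "ABGR", which is why the CL maps kRGBA_8888 to libyuv::ABGRToI420 and kBGRA_8888 to libyuv::ARGBToI420. A minimal standalone sketch of that conversion (buffer contents and dimensions are illustrative, not from the CL):

// Sketch only: CPU-side RGBA -> I420 conversion with libyuv, as used by the
// fallback path for non-texture-backed ImageBitmaps.
#include <cstdint>
#include <vector>

#include "third_party/libyuv/include/libyuv.h"

int main() {
  const int width = 64, height = 48;  // Assumes even dimensions for I420.
  std::vector<uint8_t> rgba(width * height * 4, 0xFF);  // Opaque white.
  std::vector<uint8_t> y(width * height);
  std::vector<uint8_t> u((width / 2) * (height / 2));
  std::vector<uint8_t> v((width / 2) * (height / 2));
  // libyuv's RGB->Y path uses BT.601 limited-range coefficients, matching the
  // SMPTE170M/LIMITED color space the CL tags on the resulting frame.
  libyuv::ABGRToI420(rgba.data(), width * 4,  // src pixels and stride
                     y.data(), width,         // Y plane and stride
                     u.data(), width / 2,     // U plane and stride
                     v.data(), width / 2,     // V plane and stride
                     width, height);
  return 0;
}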