Commit 7a647232 authored by jchen10, committed by Commit Bot

[WebCodecs] Support GPU VideoFrame for ImageBitmap

This uses yuv_util's ConvertFromVideoFrameYUV to import the VideoFrame
into an SkImage, converting YUV to RGB, and then creates an
AcceleratedStaticBitmapImage from the RGB SkImage. Given the
AcceleratedStaticBitmapImage, the ImageBitmap can then be constructed
from it directly.
This way, VideoFrames from both software and hardware decoders are
supported; the pixel format must be NV12 or I420.
The current test case only covers the software decoder. Once a
hardware decoder is available, a new test case should be added.
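
For illustration only, a minimal JavaScript sketch (not taken from this CL;
it mirrors the layout test below) of the round trip this change supports.
With a 640x480 source the visible area exceeds kCpuEfficientFrameSize
(480x320), so createImageBitmap() should take the new accelerated path,
while a 320x240 source stays on the software path:

let canvas = document.createElement('canvas');
canvas.width = 640;   // 640x480 > 480x320, so the accelerated path is preferred.
canvas.height = 480;
let ctx = canvas.getContext('2d');
ctx.fillStyle = '#0f0';
ctx.fillRect(0, 0, canvas.width, canvas.height);
createImageBitmap(canvas)
    .then((fromImageBitmap) => {
      // Wrap the bitmap in a WebCodecs VideoFrame, then convert it back.
      let videoFrame = new VideoFrame({ timestamp: 0 }, fromImageBitmap);
      return videoFrame.createImageBitmap();
    })
    .then((toImageBitmap) => {
      // The resulting ImageBitmap can be drawn like any other image source.
      let out = document.createElement('canvas');
      out.width = 640;
      out.height = 480;
      out.getContext('2d').drawImage(toImageBitmap, 0, 0);
    });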

Bug: 897297
Change-Id: I961bc8506f69f86d395736af458157c53579c81e
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2242724
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Commit-Queue: Jie A Chen <jie.a.chen@intel.com>
Cr-Commit-Position: refs/heads/master@{#779627}
parent d184b92e
include_rules = [
"+base/threading/thread_task_runner_handle.h",
"+components/viz/common/gpu/raster_context_provider.h",
"+components/viz/common/resources/single_release_callback.h",
"+gpu/command_buffer/client/shared_image_interface.h",
"+media/base",
"+media/filters",
"+media/media_buildflags.h",
......
@@ -6,13 +6,20 @@
#include <utility>
#include "components/viz/common/gpu/raster_context_provider.h"
#include "components/viz/common/resources/single_release_callback.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_metadata.h"
#include "media/renderers/paint_canvas_video_renderer.h"
#include "media/renderers/yuv_util.h"
#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_video_frame_init.h"
#include "third_party/blink/renderer/core/html/canvas/image_data.h"
#include "third_party/blink/renderer/core/imagebitmap/image_bitmap.h"
#include "third_party/blink/renderer/core/imagebitmap/image_bitmap_factories.h"
#include "third_party/blink/renderer/platform/graphics/accelerated_static_bitmap_image.h"
#include "third_party/blink/renderer/platform/graphics/gpu/shared_gpu_context.h"
#include "third_party/blink/renderer/platform/graphics/image.h"
#include "third_party/blink/renderer/platform/graphics/skia/skia_utils.h"
#include "third_party/blink/renderer/platform/graphics/unaccelerated_static_bitmap_image.h"
@@ -147,43 +154,96 @@ IntSize VideoFrame::BitmapSourceSize() const {
return IntSize(visibleWidth(), visibleHeight());
}
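// Returns true when the frame should take the GPU-accelerated ImageBitmap
// path: its visible area exceeds kCpuEfficientFrameSize (480x320), or it is
// already backed by textures.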
bool VideoFrame::preferAcceleratedImageBitmap() const {
return BitmapSourceSize().Area() > kCpuEfficientFrameSize ||
frame_->HasTextures();
}
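// Supported inputs are I420 frames (mappable or texture-backed) and
// texture-backed NV12 frames. Small CPU-backed frames are converted to RGB in
// software; larger or texture-backed frames use the GPU YUV-to-RGB path.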
ScriptPromise VideoFrame::CreateImageBitmap(ScriptState* script_state,
base::Optional<IntRect> crop_rect,
const ImageBitmapOptions* options,
ExceptionState& exception_state) {
if ((frame_->IsMappable() || frame_->HasTextures()) &&
(frame_->format() == media::PIXEL_FORMAT_I420 ||
(frame_->format() == media::PIXEL_FORMAT_NV12 &&
frame_->HasTextures()))) {
scoped_refptr<StaticBitmapImage> image;
if (!preferAcceleratedImageBitmap()) {
size_t bytes_per_row = sizeof(SkColor) * visibleWidth();
size_t image_pixels_size = bytes_per_row * visibleHeight();
sk_sp<SkData> image_pixels = TryAllocateSkData(image_pixels_size);
if (!image_pixels) {
exception_state.ThrowDOMException(DOMExceptionCode::kBufferOverrunError,
"Out of memory.");
return ScriptPromise();
}
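// Software path: convert the visible YUV rect to N32 RGB pixels on the CPU
// and wrap them in an UnacceleratedStaticBitmapImage.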
media::PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
frame_.get(), image_pixels->writable_data(), bytes_per_row);
// TODO(jie.a.chen@intel.com): Figure out the correct SkColorSpace.
sk_sp<SkColorSpace> skColorSpace = SkColorSpace::MakeSRGB();
SkImageInfo info =
SkImageInfo::Make(visibleWidth(), visibleHeight(), kN32_SkColorType,
kUnpremul_SkAlphaType, std::move(skColorSpace));
sk_sp<SkImage> skImage =
SkImage::MakeRasterData(info, image_pixels, bytes_per_row);
image = UnacceleratedStaticBitmapImage::Create(std::move(skImage));
} else {
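// GPU path: allocate an RGBA shared image, let media::ConvertFromVideoFrameYUV
// perform the YUV-to-RGB conversion on the GPU, then wrap the resulting
// mailbox in an AcceleratedStaticBitmapImage.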
viz::RasterContextProvider* raster_context_provider =
Platform::Current()->SharedMainThreadContextProvider();
gpu::SharedImageInterface* shared_image_interface =
raster_context_provider->SharedImageInterface();
uint32_t usage = gpu::SHARED_IMAGE_USAGE_GLES2;
if (raster_context_provider->ContextCapabilities().supports_oop_raster) {
usage |= gpu::SHARED_IMAGE_USAGE_RASTER |
gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
}
gpu::MailboxHolder dest_holder;
// Use coded_size() to comply with media::ConvertFromVideoFrameYUV.
dest_holder.mailbox = shared_image_interface->CreateSharedImage(
viz::ResourceFormat::RGBA_8888, frame_->coded_size(),
gfx::ColorSpace(), usage);
dest_holder.sync_token = shared_image_interface->GenUnverifiedSyncToken();
dest_holder.texture_target = GL_TEXTURE_2D;
media::ConvertFromVideoFrameYUV(frame_.get(), raster_context_provider,
dest_holder);
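// Generate a sync token after the conversion so consumers of the mailbox wait
// for the YUV-to-RGB draw to complete.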
gpu::SyncToken sync_token;
raster_context_provider->RasterInterface()
->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
auto release_callback = viz::SingleReleaseCallback::Create(base::BindOnce(
[](gpu::SharedImageInterface* sii, gpu::Mailbox mailbox,
const gpu::SyncToken& sync_token, bool is_lost) {
// Ideally the SharedImage would be released here like this:
//   sii->DestroySharedImage(sync_token, mailbox);
// But AcceleratedStaticBitmapImage leaks it when
// PaintImageForCurrentFrame() is called by ImageBitmap, so the
// 'sync_token' here is not accurate enough to destroy the mailbox safely.
},
base::Unretained(shared_image_interface), dest_holder.mailbox));
const SkImageInfo sk_image_info =
SkImageInfo::MakeN32Premul(codedWidth(), codedHeight());
image = AcceleratedStaticBitmapImage::CreateFromCanvasMailbox(
dest_holder.mailbox, sync_token, 0u, sk_image_info,
dest_holder.texture_target, true,
SharedGpuContext::ContextProviderWrapper(),
base::PlatformThread::CurrentRef(),
Thread::Current()->GetTaskRunner(), std::move(release_callback));
}
ImageBitmap* image_bitmap =
MakeGarbageCollected<ImageBitmap>(image, crop_rect, options);
return ImageBitmapSource::FulfillImageBitmap(script_state, image_bitmap,
exception_state);
}
exception_state.ThrowDOMException(DOMExceptionCode::kNotSupportedError,
"Unsupported VideoFrame.");
return ScriptPromise();
}
......
@@ -53,6 +53,8 @@ class MODULES_EXPORT VideoFrame final : public ScriptWrappable,
scoped_refptr<const media::VideoFrame> frame() const;
private:
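  // Frames with a visible area larger than this (480x320 pixels) prefer the
  // GPU-accelerated ImageBitmap path.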
static constexpr uint64_t kCpuEfficientFrameSize = 480u * 320u;
bool preferAcceleratedImageBitmap() const;
scoped_refptr<media::VideoFrame> frame_;
};
......
@@ -3,34 +3,49 @@
<script src="../resources/testharnessreport.js"></script>
<script>
'use strict';
function testCanvas_0f0(ctx, width, height, assert_compares) {
  let colorData = ctx.getImageData(0, 0, width, height).data;
  for (let i = 0; i < colorData.length; i += 4) {
    assert_compares(colorData[i], 0);
    assert_compares(colorData[i + 1], 255);
    assert_compares(colorData[i + 2], 0);
    assert_compares(colorData[i + 3], 255);
  }
}
function testImageBitmapToAndFromVideoFrame(width, height) {
let canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
let ctx = canvas.getContext('2d');
ctx.fillStyle = '#0f0';
ctx.fillRect(0, 0, width, height);
testCanvas_0f0(ctx, width, height, assert_equals);
return createImageBitmap(canvas)
.then((fromImageBitmap) => {
let videoFrame = new VideoFrame({ timestamp: 0 }, fromImageBitmap);
return videoFrame.createImageBitmap();
})
.then((toImageBitmap) => {
let myCanvas = document.createElement('canvas');
myCanvas.width = width;
myCanvas.height = height;
let myCtx = myCanvas.getContext('2d');
myCtx.drawImage(toImageBitmap, 0, 0);
let tolerance = 1;
testCanvas_0f0(myCtx, width, height, (actual, expected) => {
assert_approx_equals(actual, expected, tolerance);
});
});
}
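// The two sizes below exercise both conversion paths: 320x240 is below the
// kCpuEfficientFrameSize threshold (480x320) and stays on the software path,
// while 640x480 exceeds it and should take the GPU-accelerated path.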
promise_test(() => {
return testImageBitmapToAndFromVideoFrame(320, 240);
}, 'ImageBitmap<->VideoFrame with canvas size 320x240.');
promise_test(() => {
return testImageBitmapToAndFromVideoFrame(640, 480);
}, 'ImageBitmap<->VideoFrame with canvas size 640x480.');
</script>