Commit c247207a authored by Dale Curtis's avatar Dale Curtis Committed by Commit Bot

Remove unnecessary yuv -> rgba -> yuv conversion for <video> capture.

The HtmlVideoElementCapturerSource was unnecessarily rendering
media::VideoFrames to a RGBA canvas then converting them back to
YUV based media::VideoFrames. We can instead just pass the frame
directly through and avoid this process.

Clients which need this conversion (e.g., MediaRecorder) already
have mechanisms for converting texture backed VideoFrame objects
to YUV. They weren't comprehensive, so I've made them so in this
change.

Future optimizations could be done to allow the encoders to
actually handle I422, I444 and HBD content. Right now they are
just turned into I420 frames like they were in the capturer.

This change also moves the VideoFramePool from the capturer to the
encoder where one was missing previously; which should improve
allocation efficiency and overhead.

This change also fixes some paths of video capture that were not
respecting the prohibition on capturing protected content.

Bug: None
Test: All existing tests pass.
Change-Id: I6f58f80e6137807fbf0be817be20f03928a1da23
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2464916
Reviewed-by: default avatarKentaro Hara <haraken@chromium.org>
Reviewed-by: default avatarGuido Urdaneta <guidou@chromium.org>
Reviewed-by: default avatarMarkus Handell <handellm@google.com>
Reviewed-by: default avatarXiaohan Wang <xhwang@chromium.org>
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/master@{#817226}
parent cb1d5c5f
......@@ -1345,14 +1345,10 @@ void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");
// We can't copy from protected frames.
if (cdm_context_ref_)
return;
scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
gfx::Rect gfx_rect(rect);
if (video_frame.get() && video_frame->HasTextures()) {
if (video_frame && video_frame->HasTextures()) {
if (!raster_context_provider_)
return; // Unable to get/create a shared main thread context.
if (!raster_context_provider_->GrContext())
......@@ -1373,6 +1369,10 @@ void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
raster_context_provider_.get());
}
scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrame() {
  // Hand out the compositor's current frame directly; callers treat a null
  // result as "no frame available" (e.g. protected content).
  auto current_frame = GetCurrentFrameFromCompositor();
  return current_frame;
}
bool WebMediaPlayerImpl::WouldTaintOrigin() const {
if (demuxer_found_hls_) {
// HLS manifests might pull segments from a different origin. We can't know
......@@ -1437,14 +1437,11 @@ bool WebMediaPlayerImpl::CopyVideoTextureToPlatformTexture(
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");
// We can't copy from protected frames.
if (cdm_context_ref_)
return false;
scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
if (!video_frame.get() || !video_frame->HasTextures()) {
if (!video_frame || !video_frame->HasTextures()) {
return false;
}
if (out_metadata) {
// WebGL last-uploaded-frame-metadata API is enabled.
// https://crbug.com/639174
......@@ -2968,6 +2965,10 @@ scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrameFromCompositor()
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
// We can't copy from protected frames.
if (cdm_context_ref_)
return nullptr;
// Can be null.
scoped_refptr<VideoFrame> video_frame =
compositor_->GetCurrentFrameOnAnyThread();
......
......@@ -149,6 +149,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
cc::PaintFlags& flags,
int already_uploaded_id,
VideoFrameUploadMetadata* out_metadata) override;
scoped_refptr<VideoFrame> GetCurrentFrame() override;
// True if the loaded media has a playable video/audio track.
bool HasVideo() const override;
......
......@@ -31,6 +31,7 @@
#ifndef THIRD_PARTY_BLINK_PUBLIC_PLATFORM_WEB_MEDIA_PLAYER_H_
#define THIRD_PARTY_BLINK_PUBLIC_PLATFORM_WEB_MEDIA_PLAYER_H_
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/time/time.h"
#include "components/viz/common/surfaces/surface_id.h"
......@@ -54,6 +55,10 @@ class GLES2Interface;
}
}
namespace media {
class VideoFrame;
}
namespace blink {
class WebContentDecryptionModule;
......@@ -263,6 +268,12 @@ class WebMediaPlayer {
int already_uploaded_id = -1,
VideoFrameUploadMetadata* out_metadata = nullptr) = 0;
// Similar to Paint(), but just returns the frame directly instead of trying
// to upload or convert it. Note: This may kick off a process to update the
// current frame for a future call in some cases. Returns nullptr if no frame
// is available.
virtual scoped_refptr<media::VideoFrame> GetCurrentFrame() = 0;
// Do a GPU-GPU texture copy of the current video frame to |texture|,
// reallocating |texture| at the appropriate size with given internal
// format, format, and type if necessary.
......
......@@ -128,6 +128,7 @@ class BLINK_MODULES_EXPORT WebMediaPlayerMS
cc::PaintFlags& flags,
int already_uploaded_id,
VideoFrameUploadMetadata* out_metadata) override;
scoped_refptr<media::VideoFrame> GetCurrentFrame() override;
media::PaintCanvasVideoRenderer* GetPaintCanvasVideoRenderer();
void ResetCanvasCache();
......
......@@ -9,9 +9,7 @@
#include "base/memory/ptr_util.h"
#include "base/single_thread_task_runner.h"
#include "base/trace_event/trace_event.h"
#include "cc/paint/skia_paint_canvas.h"
#include "media/base/limits.h"
#include "skia/ext/platform_canvas.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_rect.h"
#include "third_party/blink/public/platform/web_size.h"
......@@ -20,10 +18,9 @@
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/libyuv/include/libyuv.h"
namespace {

// Minimum capture frame rate, in frames per second, accepted by the capturer.
// NOTE(review): the diff residue left both a `const float` and a `constexpr
// float` declaration of this constant; keeping only the `constexpr` form,
// which is the post-change version and avoids a redefinition error.
constexpr float kMinFramesPerSecond = 1.0;

}  // anonymous namespace
namespace blink {
......@@ -50,7 +47,6 @@ HtmlVideoElementCapturerSource::HtmlVideoElementCapturerSource(
: web_media_player_(player),
io_task_runner_(io_task_runner),
task_runner_(task_runner),
is_opaque_(player->IsOpaque()),
capture_frame_rate_(0.0) {
DCHECK(web_media_player_);
}
......@@ -125,79 +121,18 @@ void HtmlVideoElementCapturerSource::sendNewFrame() {
if (start_capture_time_.is_null())
start_capture_time_ = current_time;
if (!canvas_ || is_opaque_ != web_media_player_->IsOpaque()) {
// TODO(crbug.com/964494): Avoid the explicit conversion to gfx::Size here.
// TODO(sandersd): Implement support for size changes rather than scaling.
if (!canvas_)
natural_size_ = gfx::Size(web_media_player_->NaturalSize());
is_opaque_ = web_media_player_->IsOpaque();
if (!bitmap_.tryAllocPixels(SkImageInfo::MakeN32(
natural_size_.width(), natural_size_.height(),
is_opaque_ ? kOpaque_SkAlphaType : kPremul_SkAlphaType))) {
running_callback_.Run(false);
return;
}
canvas_ = std::make_unique<cc::SkiaPaintCanvas>(bitmap_);
}
cc::PaintFlags flags;
flags.setBlendMode(SkBlendMode::kSrc);
flags.setFilterQuality(kLow_SkFilterQuality);
// TODO(crbug.com/964494): Avoid the explicit conversion to blink::WebRect
// here.
web_media_player_->Paint(canvas_.get(),
blink::WebRect(gfx::Rect(natural_size_)), flags);
DCHECK_NE(kUnknown_SkColorType, canvas_->imageInfo().colorType());
DCHECK_NE(kUnknown_SkColorType, bitmap_.colorType());
DCHECK(!bitmap_.drawsNothing());
DCHECK(bitmap_.getPixels());
if (bitmap_.colorType() != kN32_SkColorType) {
DLOG(ERROR) << "Only supported color type is kN32_SkColorType (ARGB/ABGR)";
return;
}
scoped_refptr<media::VideoFrame> frame = frame_pool_.CreateFrame(
is_opaque_ ? media::PIXEL_FORMAT_I420 : media::PIXEL_FORMAT_I420A,
natural_size_, gfx::Rect(natural_size_), natural_size_,
current_time - start_capture_time_);
#if SK_PMCOLOR_BYTE_ORDER(R, G, B, A)
const uint32_t source_pixel_format = libyuv::FOURCC_ABGR;
#else
const uint32_t source_pixel_format = libyuv::FOURCC_ARGB;
#endif
if (frame &&
libyuv::ConvertToI420(
static_cast<uint8_t*>(bitmap_.getPixels()), bitmap_.computeByteSize(),
frame->visible_data(media::VideoFrame::kYPlane),
frame->stride(media::VideoFrame::kYPlane),
frame->visible_data(media::VideoFrame::kUPlane),
frame->stride(media::VideoFrame::kUPlane),
frame->visible_data(media::VideoFrame::kVPlane),
frame->stride(media::VideoFrame::kVPlane), 0 /* crop_x */,
0 /* crop_y */, frame->visible_rect().size().width(),
frame->visible_rect().size().height(), bitmap_.info().width(),
bitmap_.info().height(), libyuv::kRotate0,
source_pixel_format) == 0) {
if (!is_opaque_) {
// OK to use ARGB...() because alpha has the same alignment for both ABGR
// and ARGB.
libyuv::ARGBExtractAlpha(
static_cast<uint8_t*>(bitmap_.getPixels()),
static_cast<int>(bitmap_.rowBytes()) /* stride */,
frame->visible_data(media::VideoFrame::kAPlane),
frame->stride(media::VideoFrame::kAPlane), bitmap_.info().width(),
bitmap_.info().height());
} // Success!
if (auto frame = web_media_player_->GetCurrentFrame()) {
auto new_frame = media::VideoFrame::WrapVideoFrame(
frame, frame->format(), frame->visible_rect(), frame->natural_size());
new_frame->set_timestamp(current_time - start_capture_time_);
// Post with CrossThreadBind here, instead of CrossThreadBindOnce,
// otherwise the |new_frame_callback_| ivar can be nulled out
// unintentionally.
PostCrossThreadTask(
*io_task_runner_, FROM_HERE,
CrossThreadBindOnce(new_frame_callback_, frame, current_time));
CrossThreadBindOnce(new_frame_callback_, std::move(new_frame),
current_time));
}
// Calculate the time in the future where the next frame should be created.
......
......@@ -9,22 +9,15 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "media/base/video_frame_pool.h"
#include "media/base/video_types.h"
#include "media/capture/video_capturer_source.h"
#include "third_party/blink/public/platform/web_size.h"
#include "third_party/blink/renderer/modules/modules_export.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkRefCnt.h"
namespace base {
class SingleThreadTaskRunner;
}
namespace cc {
class PaintCanvas;
} // namespace cc
namespace blink {
class WebMediaPlayer;
......@@ -61,17 +54,12 @@ class MODULES_EXPORT HtmlVideoElementCapturerSource final
// it into a format suitable for MediaStreams.
void sendNewFrame();
media::VideoFramePool frame_pool_;
SkBitmap bitmap_;
std::unique_ptr<cc::PaintCanvas> canvas_;
gfx::Size natural_size_;
const base::WeakPtr<blink::WebMediaPlayer> web_media_player_;
const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
bool is_opaque_;
// These three configuration items are passed on StartCapture();
RunningCallback running_callback_;
VideoCaptureDeliverFrameCB new_frame_callback_;
......
......@@ -15,6 +15,7 @@
#include "cc/paint/paint_canvas.h"
#include "cc/paint/paint_flags.h"
#include "media/base/limits.h"
#include "media/base/video_frame.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
......@@ -87,11 +88,17 @@ class MockWebMediaPlayer : public WebMediaPlayer {
cc::PaintFlags&,
int already_uploaded_id,
VideoFrameUploadMetadata* out_metadata) override {
return;
}
scoped_refptr<media::VideoFrame> GetCurrentFrame() override {
  // Supply a solid frame whose opacity matches |is_video_opaque_| so both the
  // opaque (I420) and transparent (I420A) capture paths are exercised; the
  // exact pixel contents are irrelevant to HtmlVideoElementCapturerSource.
  // Fix: removed a stray unreachable `return;` (diff residue) and the stale
  // comment that referred to the deleted canvas-painting path.
  return is_video_opaque_ ? media::VideoFrame::CreateBlackFrame(size_)
                          : media::VideoFrame::CreateTransparentFrame(size_);
}
bool IsOpaque() const override { return is_video_opaque_; }
bool HasAvailableVideoFrame() const override { return true; }
......
......@@ -264,24 +264,23 @@ void VideoTrackRecorderImpl::Encoder::StartFrameEncode(
if (paused_)
return;
if (!(video_frame->format() == media::PIXEL_FORMAT_I420 ||
video_frame->format() == media::PIXEL_FORMAT_ARGB ||
video_frame->format() == media::PIXEL_FORMAT_ABGR ||
video_frame->format() == media::PIXEL_FORMAT_I420A ||
video_frame->format() == media::PIXEL_FORMAT_NV12 ||
video_frame->format() == media::PIXEL_FORMAT_XRGB)) {
NOTREACHED() << media::VideoPixelFormatToString(video_frame->format());
return;
}
const bool is_format_supported =
video_frame->format() == media::PIXEL_FORMAT_I420 ||
video_frame->format() == media::PIXEL_FORMAT_ARGB ||
video_frame->format() == media::PIXEL_FORMAT_ABGR ||
video_frame->format() == media::PIXEL_FORMAT_I420A ||
video_frame->format() == media::PIXEL_FORMAT_NV12 ||
video_frame->format() == media::PIXEL_FORMAT_XRGB;
if (num_frames_in_encode_->count() > kMaxNumberOfFramesInEncode) {
DLOG(WARNING) << "Too many frames are queued up. Dropping this one.";
return;
}
if (video_frame->HasTextures() &&
video_frame->storage_type() !=
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
if (!is_format_supported ||
(video_frame->HasTextures() &&
video_frame->storage_type() !=
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER)) {
PostCrossThreadTask(
*main_task_runner_.get(), FROM_HERE,
CrossThreadBindOnce(&Encoder::RetrieveFrameOnMainThread,
......@@ -333,14 +332,11 @@ void VideoTrackRecorderImpl::Encoder::RetrieveFrameOnMainThread(
} else {
// Accelerated decoders produce ARGB/ABGR texture-backed frames (see
// https://crbug.com/585242), fetch them using a PaintCanvasVideoRenderer.
// Additionally, Macintosh accelerated decoders can produce XRGB content
// Additionally, macOS accelerated decoders can produce XRGB content
// and are treated the same way.
//
// TODO(crbug/1023390): Add browsertest for these.
DCHECK(video_frame->HasTextures());
DCHECK(video_frame->format() == media::PIXEL_FORMAT_ARGB ||
video_frame->format() == media::PIXEL_FORMAT_ABGR ||
video_frame->format() == media::PIXEL_FORMAT_XRGB);
// This path is also used for less common formats like I422, I444, and
// high bit depth pixel formats.
const gfx::Size& old_visible_size = video_frame->visible_rect().size();
gfx::Size new_visible_size = old_visible_size;
......@@ -353,13 +349,16 @@ void VideoTrackRecorderImpl::Encoder::RetrieveFrameOnMainThread(
old_visible_size.width());
}
frame = media::VideoFrame::CreateFrame(
media::PIXEL_FORMAT_I420, new_visible_size, gfx::Rect(new_visible_size),
new_visible_size, video_frame->timestamp());
const bool is_opaque = media::IsOpaque(video_frame->format());
frame = frame_pool_.CreateFrame(
is_opaque ? media::PIXEL_FORMAT_I420 : media::PIXEL_FORMAT_I420A,
new_visible_size, gfx::Rect(new_visible_size), new_visible_size,
video_frame->timestamp());
const SkImageInfo info = SkImageInfo::MakeN32(
frame->visible_rect().width(), frame->visible_rect().height(),
kOpaque_SkAlphaType);
is_opaque ? kOpaque_SkAlphaType : kPremul_SkAlphaType);
// Create |surface_| if it doesn't exist or incoming resolution has changed.
if (!canvas_ || canvas_->imageInfo().width() != info.width() ||
......@@ -400,6 +399,14 @@ void VideoTrackRecorderImpl::Encoder::RetrieveFrameOnMainThread(
DLOG(ERROR) << "Error converting frame to I420";
return;
}
if (!is_opaque) {
// Alpha has the same alignment for both ABGR and ARGB.
libyuv::ARGBExtractAlpha(static_cast<uint8_t*>(pixmap.writable_addr()),
static_cast<int>(pixmap.rowBytes()) /* stride */,
frame->visible_data(media::VideoFrame::kAPlane),
frame->stride(media::VideoFrame::kAPlane),
pixmap.width(), pixmap.height());
}
}
PostCrossThreadTask(
......@@ -448,7 +455,7 @@ VideoTrackRecorderImpl::Encoder::ConvertToI420ForSoftwareEncoder(
auto* gmb = frame->GetGpuMemoryBuffer();
if (!gmb->Map())
return frame;
scoped_refptr<media::VideoFrame> i420_frame = media::VideoFrame::CreateFrame(
scoped_refptr<media::VideoFrame> i420_frame = frame_pool_.CreateFrame(
media::VideoPixelFormat::PIXEL_FORMAT_I420, frame->coded_size(),
frame->visible_rect(), frame->natural_size(), frame->timestamp());
auto ret = libyuv::NV12ToI420(
......
......@@ -14,6 +14,7 @@
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/video_frame_pool.h"
#include "media/muxers/webm_muxer.h"
#include "media/video/video_encode_accelerator.h"
#include "third_party/blink/public/common/media/video_capture.h"
......@@ -196,7 +197,7 @@ class VideoTrackRecorder : public TrackRecorder<MediaStreamVideoSink> {
// Used mainly by the software encoders since I420 is the only supported
// pixel format. The function is best-effort. If for any reason the
// conversion fails, the original |frame| will be returned.
static scoped_refptr<media::VideoFrame> ConvertToI420ForSoftwareEncoder(
scoped_refptr<media::VideoFrame> ConvertToI420ForSoftwareEncoder(
scoped_refptr<media::VideoFrame> frame);
// Used to shutdown properly on the same thread we were created.
......@@ -233,6 +234,8 @@ class VideoTrackRecorder : public TrackRecorder<MediaStreamVideoSink> {
SkBitmap bitmap_;
std::unique_ptr<cc::PaintCanvas> canvas_;
media::VideoFramePool frame_pool_;
DISALLOW_COPY_AND_ASSIGN(Encoder);
};
......
......@@ -946,6 +946,12 @@ void WebMediaPlayerMS::Paint(cc::PaintCanvas* canvas,
provider.get());
}
scoped_refptr<media::VideoFrame> WebMediaPlayerMS::GetCurrentFrame() {
  DVLOG(3) << __func__;
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // Forward whatever frame the compositor currently holds; may be null.
  scoped_refptr<media::VideoFrame> current_frame =
      compositor_->GetCurrentFrame();
  return current_frame;
}
bool WebMediaPlayerMS::WouldTaintOrigin() const {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return false;
......
......@@ -19,7 +19,7 @@ include_rules = [
"+cc",
"+mojo/core/embedder",
"+services/network/public",
'+testing',
"+testing",
"+third_party/blink/renderer/platform/context_lifecycle_notifier.h",
"+third_party/blink/renderer/platform/context_lifecycle_observer.h",
......@@ -50,10 +50,13 @@ include_rules = [
]
specific_include_rules = {
'blink_fuzzer_test_support\.cc': [
"blink_fuzzer_test_support\.cc": [
"+content/public/test/blink_test_environment.h",
],
'testing_platform_support_with_mock_scheduler\.cc': [
"empty_web_media_player\.cc": [
"+media/base/video_frame.h",
],
"testing_platform_support_with_mock_scheduler\.cc": [
"+base/task/sequence_manager/test/sequence_manager_for_test.h",
],
"video_frame_utils\.cc": [
......
......@@ -4,6 +4,7 @@
#include "third_party/blink/renderer/platform/testing/empty_web_media_player.h"
#include "media/base/video_frame.h"
#include "third_party/blink/public/platform/web_size.h"
#include "third_party/blink/public/platform/web_time_range.h"
......@@ -34,4 +35,8 @@ WebString EmptyWebMediaPlayer::GetErrorMessage() const {
return WebString();
}
scoped_refptr<media::VideoFrame> EmptyWebMediaPlayer::GetCurrentFrame() {
  // The empty player produces no video, so there is never a frame to return.
  return scoped_refptr<media::VideoFrame>();
}
} // namespace blink
......@@ -65,6 +65,7 @@ class EmptyWebMediaPlayer : public WebMediaPlayer {
cc::PaintFlags&,
int already_uploaded_id,
VideoFrameUploadMetadata*) override {}
scoped_refptr<media::VideoFrame> GetCurrentFrame() override;
bool HasAvailableVideoFrame() const override { return false; }
base::WeakPtr<WebMediaPlayer> AsWeakPtr() override { return nullptr; }
};
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment