Commit 093f8346 authored by Evan Shrubsole, committed by Chromium LUCI CQ

Handle SW NV12 frames in WebRtcVideoTrackSource

When hardware acceleration was disabled, the WebRtcVideoTrackSource can
receive NV12 frames in software. This patch adds support for the NV12
frames in WebRtcVideoFrameAdapter so that the WebRtcVideoTrackSource can
pass those frames safely.

I tested this manually with hardware acceleration off and confirmed that it worked. I also
confirmed that the NV12 frames in software are used as expected from
WebRTC, as they would be if they were from a GMB.

Test: https://webrtc.github.io/samples/src/content/peerconnection/pc1/
Bug: 1168948
Change-Id: I5b78ec01fe6ef1c4a4a047bb3fda908ad6813e4b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2645084
Commit-Queue: Evan Shrubsole <eshr@google.com>
Reviewed-by: Henrik Boström <hbos@chromium.org>
Reviewed-by: Markus Handell <handellm@google.com>
Cr-Commit-Position: refs/heads/master@{#846198}
parent ffea1ac9
......@@ -141,9 +141,9 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
scoped_refptr<media::VideoFrame> frame) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
TRACE_EVENT0("media", "WebRtcVideoSource::OnFrameCaptured");
if (!(frame->IsMappable() &&
(frame->format() == media::PIXEL_FORMAT_I420 ||
frame->format() == media::PIXEL_FORMAT_I420A)) &&
if (!(frame->IsMappable() && (frame->format() == media::PIXEL_FORMAT_I420 ||
frame->format() == media::PIXEL_FORMAT_I420A ||
frame->format() == media::PIXEL_FORMAT_NV12)) &&
!(frame->storage_type() ==
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) &&
!frame->HasTextures()) {
......
......@@ -352,40 +352,68 @@ WebRtcVideoFrameAdapter::CreateFrameAdapter() const {
}
IsValidFrame(*frame_);
// If the frame is a software frame then it can be in I420, I420A or NV12.
// TODO(https://crbug.com/1169727): Move this check to somewhere else, and add
// tests for all pixel formats.
DCHECK(frame_->format() == media::PIXEL_FORMAT_NV12 ||
frame_->format() == media::PIXEL_FORMAT_I420 ||
frame_->format() == media::PIXEL_FORMAT_I420A)
<< "Can not scale software frame of format "
<< media::VideoPixelFormatToString(frame_->format());
// Since scaling is required, hard-apply both the cropping and scaling
// before we hand the frame over to WebRTC.
const bool has_alpha = frame_->format() == media::PIXEL_FORMAT_I420A;
gfx::Size scaled_size = frame_->natural_size();
scoped_refptr<media::VideoFrame> scaled_frame = frame_;
if (scaled_size != frame_->visible_rect().size()) {
CHECK(scaled_frame_pool_);
scaled_frame = scaled_frame_pool_->CreateFrame(
has_alpha ? media::PIXEL_FORMAT_I420A : media::PIXEL_FORMAT_I420,
scaled_size, gfx::Rect(scaled_size), scaled_size, frame_->timestamp());
libyuv::I420Scale(
frame_->visible_data(media::VideoFrame::kYPlane),
frame_->stride(media::VideoFrame::kYPlane),
frame_->visible_data(media::VideoFrame::kUPlane),
frame_->stride(media::VideoFrame::kUPlane),
frame_->visible_data(media::VideoFrame::kVPlane),
frame_->stride(media::VideoFrame::kVPlane),
frame_->visible_rect().width(), frame_->visible_rect().height(),
scaled_frame->data(media::VideoFrame::kYPlane),
scaled_frame->stride(media::VideoFrame::kYPlane),
scaled_frame->data(media::VideoFrame::kUPlane),
scaled_frame->stride(media::VideoFrame::kUPlane),
scaled_frame->data(media::VideoFrame::kVPlane),
scaled_frame->stride(media::VideoFrame::kVPlane), scaled_size.width(),
scaled_size.height(), libyuv::kFilterBilinear);
if (has_alpha) {
libyuv::ScalePlane(
frame_->visible_data(media::VideoFrame::kAPlane),
frame_->stride(media::VideoFrame::kAPlane),
frame_->visible_rect().width(), frame_->visible_rect().height(),
scaled_frame->data(media::VideoFrame::kAPlane),
scaled_frame->stride(media::VideoFrame::kAPlane), scaled_size.width(),
scaled_size.height(), libyuv::kFilterBilinear);
frame_->format(), scaled_size, gfx::Rect(scaled_size), scaled_size,
frame_->timestamp());
switch (frame_->format()) {
case media::PIXEL_FORMAT_I420A:
libyuv::ScalePlane(
frame_->visible_data(media::VideoFrame::kAPlane),
frame_->stride(media::VideoFrame::kAPlane),
frame_->visible_rect().width(), frame_->visible_rect().height(),
scaled_frame->data(media::VideoFrame::kAPlane),
scaled_frame->stride(media::VideoFrame::kAPlane),
scaled_size.width(), scaled_size.height(), libyuv::kFilterBilinear);
// Fallthrough to I420 in order to scale the YUV planes as well.
ABSL_FALLTHROUGH_INTENDED;
case media::PIXEL_FORMAT_I420:
libyuv::I420Scale(
frame_->visible_data(media::VideoFrame::kYPlane),
frame_->stride(media::VideoFrame::kYPlane),
frame_->visible_data(media::VideoFrame::kUPlane),
frame_->stride(media::VideoFrame::kUPlane),
frame_->visible_data(media::VideoFrame::kVPlane),
frame_->stride(media::VideoFrame::kVPlane),
frame_->visible_rect().width(), frame_->visible_rect().height(),
scaled_frame->data(media::VideoFrame::kYPlane),
scaled_frame->stride(media::VideoFrame::kYPlane),
scaled_frame->data(media::VideoFrame::kUPlane),
scaled_frame->stride(media::VideoFrame::kUPlane),
scaled_frame->data(media::VideoFrame::kVPlane),
scaled_frame->stride(media::VideoFrame::kVPlane),
scaled_size.width(), scaled_size.height(), libyuv::kFilterBilinear);
break;
case media::PIXEL_FORMAT_NV12:
libyuv::NV12Scale(
frame_->visible_data(media::VideoFrame::kYPlane),
frame_->stride(media::VideoFrame::kYPlane),
frame_->visible_data(media::VideoFrame::kUVPlane),
frame_->stride(media::VideoFrame::kUVPlane),
frame_->visible_rect().width(), frame_->visible_rect().height(),
scaled_frame->data(media::VideoFrame::kYPlane),
scaled_frame->stride(media::VideoFrame::kYPlane),
scaled_frame->data(media::VideoFrame::kUVPlane),
scaled_frame->stride(media::VideoFrame::kUVPlane),
scaled_size.width(), scaled_size.height(), libyuv::kFilterBilinear);
break;
default:
NOTREACHED();
}
}
return MakeFrameAdapter(std::move(scaled_frame));
......
......@@ -15,6 +15,19 @@
namespace blink {
namespace {
// Allocates an owned-memory media::VideoFrame with the requested geometry
// and pixel format. The timestamp is always zero, which is sufficient for
// these adapter tests since they only inspect sizes and pixel formats.
scoped_refptr<media::VideoFrame> CreateTestMemoryFrame(
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    media::VideoPixelFormat pixel_format) {
  const base::TimeDelta kZeroTimestamp;
  return media::VideoFrame::CreateFrame(pixel_format, coded_size, visible_rect,
                                        natural_size, kZeroTimestamp);
}
} // namespace
TEST(WebRtcVideoFrameAdapterTest, WidthAndHeight) {
const gfx::Size kCodedSize(1280, 960);
const gfx::Rect kVisibleRect(0, 120, 1280, 720);
......@@ -101,6 +114,32 @@ TEST(WebRtcVideoFrameAdapterTest, ToI420DownScaleGmb) {
EXPECT_EQ(get_i420_frame->height(), kNaturalSize.height());
}
TEST(WebRtcVideoFrameAdapterTest, ToI420ADownScale) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);
  scoped_refptr<WebRtcVideoFrameAdapter::BufferPoolOwner> buffer_pool =
      new WebRtcVideoFrameAdapter::BufferPoolOwner();

  // Wrap an owned-memory I420A frame; the adapter must report the natural
  // size, not the coded or visible size.
  auto frame = CreateTestMemoryFrame(kCodedSize, kVisibleRect, kNaturalSize,
                                     media::VideoPixelFormat::PIXEL_FORMAT_I420A);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(std::move(frame),
                                                         buffer_pool));
  EXPECT_EQ(adapter->width(), kNaturalSize.width());
  EXPECT_EQ(adapter->height(), kNaturalSize.height());

  // Downscaling must keep the alpha plane (kI420A) and produce a buffer of
  // the natural size.
  auto i420a_frame = adapter->ToI420();
  ASSERT_TRUE(i420a_frame);
  EXPECT_EQ(webrtc::VideoFrameBuffer::Type::kI420A, i420a_frame->type());
  EXPECT_EQ(i420a_frame->width(), kNaturalSize.width());
  EXPECT_EQ(i420a_frame->height(), kNaturalSize.height());
}
TEST(WebRtcVideoFrameAdapterTest, Nv12WrapsGmbWhenNoScalingNeeededWithFeature) {
base::test::ScopedFeatureList scoped_feautre_list;
scoped_feautre_list.InitAndEnableFeature(
......@@ -184,4 +223,60 @@ TEST(WebRtcVideoFrameAdapterTest, Nv12ScalesGmbWithFeature) {
EXPECT_EQ(i420_frame->height(), kNaturalSize.height());
}
TEST(WebRtcVideoFrameAdapterTest, Nv12OwnedMemoryFrame) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  // Natural size equals the visible rect, so no scaling should be needed.
  const gfx::Size kNaturalSize = kVisibleRect.size();
  scoped_refptr<WebRtcVideoFrameAdapter::BufferPoolOwner> buffer_pool =
      new WebRtcVideoFrameAdapter::BufferPoolOwner();

  // Wrap an owned-memory NV12 frame; the adapter must report the natural
  // size, not the coded size.
  auto frame = CreateTestMemoryFrame(kCodedSize, kVisibleRect, kNaturalSize,
                                     media::VideoPixelFormat::PIXEL_FORMAT_NV12);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(std::move(frame),
                                                         buffer_pool));
  EXPECT_EQ(adapter->width(), kNaturalSize.width());
  EXPECT_EQ(adapter->height(), kNaturalSize.height());

  // Requesting a mapped NV12 buffer should succeed and match the visible
  // rect size.
  std::vector<webrtc::VideoFrameBuffer::Type> nv12_type{
      webrtc::VideoFrameBuffer::Type::kNV12};
  auto nv12_frame = adapter->GetMappedFrameBuffer(nv12_type);
  ASSERT_TRUE(nv12_frame);
  EXPECT_EQ(webrtc::VideoFrameBuffer::Type::kNV12, nv12_frame->type());
  EXPECT_EQ(nv12_frame->width(), kVisibleRect.size().width());
  EXPECT_EQ(nv12_frame->height(), kVisibleRect.size().height());
}
TEST(WebRtcVideoFrameAdapterTest, Nv12ScaleOwnedMemoryFrame) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  // Natural size differs from the visible rect, forcing a scale.
  const gfx::Size kNaturalSize(640, 360);
  scoped_refptr<WebRtcVideoFrameAdapter::BufferPoolOwner> buffer_pool =
      new WebRtcVideoFrameAdapter::BufferPoolOwner();

  // Wrap an owned-memory NV12 frame; the adapter must report the natural
  // size, not the coded or visible size.
  auto frame = CreateTestMemoryFrame(kCodedSize, kVisibleRect, kNaturalSize,
                                     media::VideoPixelFormat::PIXEL_FORMAT_NV12);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(std::move(frame),
                                                         buffer_pool));
  EXPECT_EQ(adapter->width(), kNaturalSize.width());
  EXPECT_EQ(adapter->height(), kNaturalSize.height());

  // The mapped NV12 buffer should come back scaled to the natural size.
  std::vector<webrtc::VideoFrameBuffer::Type> nv12_type{
      webrtc::VideoFrameBuffer::Type::kNV12};
  auto nv12_frame = adapter->GetMappedFrameBuffer(nv12_type);
  ASSERT_TRUE(nv12_frame);
  EXPECT_EQ(webrtc::VideoFrameBuffer::Type::kNV12, nv12_frame->type());
  EXPECT_EQ(nv12_frame->width(), kNaturalSize.width());
  EXPECT_EQ(nv12_frame->height(), kNaturalSize.height());
}
} // namespace blink
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment