Commit e6ed840d authored by Evan Shrubsole's avatar Evan Shrubsole Committed by Commit Bot

Avoid converting GMB frames to I420 under feature

In the kWebRtcLibvpxEncodeNV12 feature, avoid converting the scaled
image to I420. This saves an allocation in creating the NV12ToI420
scale, and a copy of the scaled UV plane.

Bug: 1134165
Change-Id: I6b4e433ae0ea4b22c49cc5f14904b1a67439f34b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2483828
Reviewed-by: Henrik Boström <hbos@chromium.org>
Commit-Queue: Evan Shrubsole <eshr@google.com>
Cr-Commit-Position: refs/heads/master@{#822077}
parent fa79b41d
...@@ -273,6 +273,42 @@ scoped_refptr<media::VideoFrame> MakeScaledI420VideoFrame( ...@@ -273,6 +273,42 @@ scoped_refptr<media::VideoFrame> MakeScaledI420VideoFrame(
return dst_frame; return dst_frame;
} }
// Maps |source_frame|'s GpuMemoryBuffer, crops it to the frame's visible
// rectangle, and scales it to the frame's natural size, producing a new
// pool-allocated NV12 frame. Avoids the NV12->I420 conversion (and the extra
// allocation plus UV-plane copy it implies) that MakeScaledI420VideoFrame
// performs.
//
// Returns nullptr if the source has no GpuMemoryBuffer, if mapping fails, or
// if the pool cannot allocate a destination frame. The GMB is always unmapped
// before returning.
scoped_refptr<media::VideoFrame> MakeScaledNV12VideoFrame(
    scoped_refptr<media::VideoFrame> source_frame,
    scoped_refptr<blink::WebRtcVideoFrameAdapter::BufferPoolOwner>
        scaled_frame_pool) {
  gfx::GpuMemoryBuffer* gmb = source_frame->GetGpuMemoryBuffer();
  if (!gmb || !gmb->Map()) {
    return nullptr;
  }
  // Crop to the visible rectangle specified in |source_frame|. Plane 0 is Y
  // (one byte per pixel); plane 1 is interleaved UV subsampled 2x2, so the
  // horizontal offset is rounded down to an even pixel and the vertical
  // offset is halved.
  const uint8_t* src_y = (reinterpret_cast<const uint8_t*>(gmb->memory(0)) +
                          source_frame->visible_rect().x() +
                          (source_frame->visible_rect().y() * gmb->stride(0)));
  const uint8_t* src_uv =
      (reinterpret_cast<const uint8_t*>(gmb->memory(1)) +
       ((source_frame->visible_rect().x() / 2) * 2) +
       ((source_frame->visible_rect().y() / 2) * gmb->stride(1)));
  auto dst_frame = scaled_frame_pool->CreateFrame(
      media::PIXEL_FORMAT_NV12, source_frame->natural_size(),
      gfx::Rect(source_frame->natural_size()), source_frame->natural_size(),
      source_frame->timestamp());
  if (!dst_frame) {
    // Pool allocation can fail; unmap the GMB before bailing so we do not
    // leave the buffer mapped (and do not dereference a null frame below).
    gmb->Unmap();
    return nullptr;
  }
  dst_frame->metadata()->MergeMetadataFrom(source_frame->metadata());
  const auto& nv12_planes = dst_frame->layout().planes();
  libyuv::NV12Scale(src_y, gmb->stride(0), src_uv, gmb->stride(1),
                    source_frame->visible_rect().width(),
                    source_frame->visible_rect().height(),
                    dst_frame->data(media::VideoFrame::kYPlane),
                    nv12_planes[media::VideoFrame::kYPlane].stride,
                    dst_frame->data(media::VideoFrame::kUVPlane),
                    nv12_planes[media::VideoFrame::kUVPlane].stride,
                    dst_frame->coded_size().width(),
                    dst_frame->coded_size().height(), libyuv::kFilterBox);
  gmb->Unmap();
  return dst_frame;
}
scoped_refptr<media::VideoFrame> ConstructVideoFrameFromGpu( scoped_refptr<media::VideoFrame> ConstructVideoFrameFromGpu(
scoped_refptr<media::VideoFrame> source_frame, scoped_refptr<media::VideoFrame> source_frame,
scoped_refptr<blink::WebRtcVideoFrameAdapter::BufferPoolOwner> scoped_refptr<blink::WebRtcVideoFrameAdapter::BufferPoolOwner>
...@@ -287,11 +323,14 @@ scoped_refptr<media::VideoFrame> ConstructVideoFrameFromGpu( ...@@ -287,11 +323,14 @@ scoped_refptr<media::VideoFrame> ConstructVideoFrameFromGpu(
// Convert to I420 and scale to the natural size specified in |source_frame|. // Convert to I420 and scale to the natural size specified in |source_frame|.
const bool dont_convert_nv12_image = const bool dont_convert_nv12_image =
base::FeatureList::IsEnabled(blink::features::kWebRtcLibvpxEncodeNV12); base::FeatureList::IsEnabled(blink::features::kWebRtcLibvpxEncodeNV12);
if (dont_convert_nv12_image && if (!dont_convert_nv12_image) {
source_frame->natural_size() == source_frame->visible_rect().size()) { return MakeScaledI420VideoFrame(std::move(source_frame),
std::move(scaled_frame_pool));
} else if (source_frame->natural_size() ==
source_frame->visible_rect().size()) {
return WrapGmbVideoFrameForMappedMemoryAccess(std::move(source_frame)); return WrapGmbVideoFrameForMappedMemoryAccess(std::move(source_frame));
} else { } else {
return MakeScaledI420VideoFrame(std::move(source_frame), return MakeScaledNV12VideoFrame(std::move(source_frame),
std::move(scaled_frame_pool)); std::move(scaled_frame_pool));
} }
} }
......
...@@ -91,9 +91,12 @@ TEST(WebRtcVideoFrameAdapterTest, ToI420DownScaleGmb) { ...@@ -91,9 +91,12 @@ TEST(WebRtcVideoFrameAdapterTest, ToI420DownScaleGmb) {
auto i420_frame = gmb_frame_adapter->ToI420(); auto i420_frame = gmb_frame_adapter->ToI420();
EXPECT_EQ(i420_frame->width(), kNaturalSize.width()); EXPECT_EQ(i420_frame->width(), kNaturalSize.width());
EXPECT_EQ(i420_frame->height(), kNaturalSize.height()); EXPECT_EQ(i420_frame->height(), kNaturalSize.height());
auto* get_i420_frame = gmb_frame_adapter->GetI420();
EXPECT_EQ(get_i420_frame->width(), kNaturalSize.width());
EXPECT_EQ(get_i420_frame->height(), kNaturalSize.height());
} }
TEST(WebRtcVideoFrameAdapterTest, Nv12WrapsGmbWhenNoScalingNeeeded) { TEST(WebRtcVideoFrameAdapterTest, Nv12WrapsGmbWhenNoScalingNeeededWithFeature) {
base::test::ScopedFeatureList scoped_feautre_list; base::test::ScopedFeatureList scoped_feautre_list;
scoped_feautre_list.InitAndEnableFeature( scoped_feautre_list.InitAndEnableFeature(
blink::features::kWebRtcLibvpxEncodeNV12); blink::features::kWebRtcLibvpxEncodeNV12);
...@@ -135,4 +138,45 @@ TEST(WebRtcVideoFrameAdapterTest, Nv12WrapsGmbWhenNoScalingNeeeded) { ...@@ -135,4 +138,45 @@ TEST(WebRtcVideoFrameAdapterTest, Nv12WrapsGmbWhenNoScalingNeeeded) {
EXPECT_EQ(i420_frame->height(), kNaturalSize.height()); EXPECT_EQ(i420_frame->height(), kNaturalSize.height());
} }
TEST(WebRtcVideoFrameAdapterTest, Nv12ScalesGmbWithFeature) {
  // Enable the NV12 encode path so the adapter maps/scales NV12 directly
  // instead of converting to I420.
  base::test::ScopedFeatureList feature_list;
  feature_list.InitAndEnableFeature(blink::features::kWebRtcLibvpxEncodeNV12);

  // Coded 1280x960, visible crop 1280x720, scaled down to 640x360 — i.e. a
  // frame that genuinely requires scaling, not just wrapping.
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);

  scoped_refptr<WebRtcVideoFrameAdapter::BufferPoolOwner> buffer_pool =
      new WebRtcVideoFrameAdapter::BufferPoolOwner();
  auto test_frame = CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                                    media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);

  // For a GpuMemoryBuffer-backed VideoFrame the adapter must report the
  // natural size as its dimensions.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(test_frame,
                                                         buffer_pool));
  EXPECT_EQ(adapter->width(), kNaturalSize.width());
  EXPECT_EQ(adapter->height(), kNaturalSize.height());

  // With the feature on, requesting an NV12 mapping yields an NV12 buffer
  // already scaled to the natural size.
  std::vector<webrtc::VideoFrameBuffer::Type> nv12_types{
      webrtc::VideoFrameBuffer::Type::kNV12};
  auto mapped_nv12 = adapter->GetMappedFrameBuffer(nv12_types);
  ASSERT_TRUE(mapped_nv12);
  EXPECT_EQ(webrtc::VideoFrameBuffer::Type::kNV12, mapped_nv12->type());
  EXPECT_EQ(mapped_nv12->width(), kNaturalSize.width());
  EXPECT_EQ(mapped_nv12->height(), kNaturalSize.height());

  // Requesting an I420 mapping fails (the adapted frame is NV12), but an
  // explicit ToI420() conversion still works and keeps the natural size.
  std::vector<webrtc::VideoFrameBuffer::Type> i420_types{
      webrtc::VideoFrameBuffer::Type::kI420};
  EXPECT_FALSE(adapter->GetMappedFrameBuffer(i420_types));
  auto converted_i420 = adapter->ToI420();
  ASSERT_TRUE(converted_i420);
  EXPECT_EQ(converted_i420->width(), kNaturalSize.width());
  EXPECT_EQ(converted_i420->height(), kNaturalSize.height());
}
} // namespace blink } // namespace blink
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment