Commit 4653e564 authored by Ricky Liang, committed by Commit Bot

Video capture with GpuMemoryBuffer - Video frame resolution adaptation

This CL allows Chrome and WebRTC to apply resolution adaptation with
GpuMemoryBuffer-based video frame.

GMB-based video frame soft-applies cropping and scaling by specifying
visible_rectangle and natural_size in the early stage of the processing
pipeline.  The actual cropping and scaling operations happen only when
either the frame is processed on the hardware (e.g. running hardware
encoder on GPU), or when the CPU really needs to access the frame data
(e.g. running software encoder).

Bug: 982201
Change-Id: Ib8f8278603188782f0657920a2140b71d4a4fc00
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1817861
Reviewed-by: Henrik Boström <hbos@chromium.org>
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Commit-Queue: Ricky Liang <jcliang@chromium.org>
Cr-Commit-Position: refs/heads/master@{#702284}
parent 82f6313b
......@@ -78,6 +78,7 @@ class BLINK_PLATFORM_EXPORT WebRtcVideoTrackSource
gfx::Rect accumulated_update_rect_;
base::Optional<int> previous_capture_counter_;
gfx::Rect cropping_rect_of_previous_delivered_frame_;
gfx::Size natural_size_of_previous_delivered_frame_;
absl::optional<FrameAdaptationParams>
custom_frame_adaptation_params_for_testing_;
......
......@@ -61,5 +61,8 @@ specific_include_rules = {
"+base/files/scoped_temp_dir.h",
"+base/memory/aligned_memory.h",
"+base/path_service.h",
"+base/test/bind_test_util.h",
"+base/threading/thread.h",
"+third_party/blink/renderer/platform/testing/video_frame_utils.h",
],
}
......@@ -312,9 +312,15 @@ void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame(
return;
}
// TODO(perkj): Allow cropping / scaling of textures once
// https://crbug/362521 is fixed.
if (frame->HasTextures()) {
// If the frame is a texture not backed up by GPU memory we don't apply
// cropping/scaling and deliver the frame as-is, leaving it up to the
// destination to rescale it. Otherwise, cropping and scaling is soft-applied
// before delivery for efficiency.
//
// TODO(crbug.com/362521): Allow cropping/scaling of non-GPU memory backed
// textures.
if (frame->HasTextures() &&
frame->storage_type() != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
DoDeliverFrame(std::move(frame), estimated_capture_time);
return;
}
......
......@@ -6,9 +6,15 @@
#include <limits>
#include "base/synchronization/waitable_event.h"
#include "base/test/bind_test_util.h"
#include "base/threading/thread.h"
#include "media/base/limits.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/web/modules/mediastream/mock_media_stream_video_source.h"
#include "third_party/blink/public/web/modules/mediastream/video_track_adapter_settings.h"
#include "third_party/blink/renderer/platform/testing/io_task_runner_testing_platform_support.h"
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
namespace blink {
......@@ -189,4 +195,158 @@ TEST(VideoTrackAdapterTest, NoRescaling) {
EXPECT_EQ(desired_size.height(), kInputHeight);
}
// Fixture that exercises a VideoTrackAdapter across the two threads it uses
// in production: a "render" thread (where tracks are added/reconfigured) and
// the IO task runner (where frames are delivered). WaitableEvents are used to
// synchronize the test body with work posted to those threads.
class VideoTrackAdapterFixtureTest : public ::testing::Test {
 public:
  VideoTrackAdapterFixtureTest()
      : testing_render_thread_("TestingRenderThread"),
        frame_received_(base::WaitableEvent::ResetPolicy::MANUAL,
                        base::WaitableEvent::InitialState::NOT_SIGNALED) {}
  ~VideoTrackAdapterFixtureTest() override = default;

 protected:
  void SetUp() override { testing_render_thread_.Start(); }

  void TearDown() override {
    // A track added via ConfigureTrack must be removed on the render thread
    // before that thread is stopped.
    if (track_added_) {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE, base::BindOnce(&VideoTrackAdapter::RemoveTrack, adapter_,
                                    null_track_.get()));
    }
    testing_render_thread_.Stop();
  }

  // Creates |mock_source_| for |capture_format| and constructs |adapter_| on
  // |testing_render_thread_|, blocking until construction has completed.
  void CreateAdapter(const media::VideoCaptureFormat& capture_format) {
    mock_source_ =
        std::make_unique<MockMediaStreamVideoSource>(capture_format, false);
    // Create the VideoTrackAdapter instance on |testing_render_thread_|.
    base::WaitableEvent adapter_created(
        base::WaitableEvent::ResetPolicy::MANUAL,
        base::WaitableEvent::InitialState::NOT_SIGNALED);
    testing_render_thread_.task_runner()->PostTask(
        FROM_HERE, base::BindLambdaForTesting([&]() {
          adapter_ = base::MakeRefCounted<VideoTrackAdapter>(
              platform_support_->GetIOTaskRunner(), mock_source_->GetWeakPtr());
          adapter_created.Signal();
        }));
    adapter_created.Wait();
  }

  // Create or re-configure the dummy |null_track_| with the given
  // |adapter_settings|. AddTrack is only issued once; subsequent calls
  // reconfigure the already-added track.
  void ConfigureTrack(const VideoTrackAdapterSettings& adapter_settings) {
    if (!track_added_) {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE,
          base::BindOnce(
              &VideoTrackAdapter::AddTrack, adapter_, null_track_.get(),
              base::BindRepeating(
                  &VideoTrackAdapterFixtureTest::OnFrameDelivered,
                  base::Unretained(this)),
              base::DoNothing(), base::DoNothing(), adapter_settings));
      track_added_ = true;
    } else {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE,
          base::BindOnce(&VideoTrackAdapter::ReconfigureTrack, adapter_,
                         null_track_.get(), adapter_settings));
    }
  }

  // Registers the callback run on every frame received in OnFrameDelivered.
  void SetFrameValidationCallback(VideoCaptureDeliverFrameCB callback) {
    frame_validation_callback_ = std::move(callback);
  }

  // Deliver |frame| to |adapter_| and wait until OnFrameDelivered signals that
  // it receives the processed frame.
  void DeliverAndValidateFrame(scoped_refptr<media::VideoFrame> frame,
                               base::TimeTicks estimated_capture_time) {
    auto deliver_frame = [&]() {
      platform_support_->GetIOTaskRunner()->PostTask(
          FROM_HERE, base::BindOnce(&VideoTrackAdapter::DeliverFrameOnIO,
                                    adapter_, frame, estimated_capture_time));
    };
    frame_received_.Reset();
    // Bounce the call to DeliverFrameOnIO off |testing_render_thread_| to
    // synchronize with the AddTrackOnIO / ReconfigureTrackOnIO that would be
    // invoked through ConfigureTrack.
    testing_render_thread_.task_runner()->PostTask(
        FROM_HERE, base::BindLambdaForTesting(deliver_frame));
    frame_received_.Wait();
  }

  // Sink callback installed via ConfigureTrack: validates the delivered frame
  // (if a validation callback is set) and unblocks DeliverAndValidateFrame.
  void OnFrameDelivered(scoped_refptr<media::VideoFrame> frame,
                        base::TimeTicks estimated_capture_time) {
    if (frame_validation_callback_) {
      frame_validation_callback_.Run(frame, estimated_capture_time);
    }
    frame_received_.Signal();
  }

  // Provides the IO task runner used by |adapter_|.
  ScopedTestingPlatformSupport<IOTaskRunnerTestingPlatformSupport>
      platform_support_;
  base::Thread testing_render_thread_;
  std::unique_ptr<MockMediaStreamVideoSource> mock_source_;
  scoped_refptr<VideoTrackAdapter> adapter_;
  // Signaled each time OnFrameDelivered runs; manually reset per delivery.
  base::WaitableEvent frame_received_;
  VideoCaptureDeliverFrameCB frame_validation_callback_;
  // For testing we use a nullptr for MediaStreamVideoTrack.
  std::unique_ptr<MediaStreamVideoTrack> null_track_ = nullptr;
  bool track_added_ = false;
};
// Verifies that a GpuMemoryBuffer-backed frame passes through the adapter
// untouched when the desired size equals its natural size, and that scaling
// is otherwise soft-applied: the delivered frame wraps the same underlying
// GpuMemoryBuffer but exposes the desired size as its natural size.
TEST_F(VideoTrackAdapterFixtureTest, DeliverFrame_GpuMemoryBuffer) {
  // Attributes for the original input frame.
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(1280, 720);
  const double kFrameRate = 30.0;
  auto gmb_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);

  // Initialize the VideoTrackAdapter to handle GpuMemoryBuffer. NV12 is the
  // only pixel format supported at the moment.
  const media::VideoCaptureFormat stream_format(kCodedSize, kFrameRate,
                                                media::PIXEL_FORMAT_NV12);
  CreateAdapter(stream_format);

  // Keep the desired size the same as the natural size of the original frame.
  VideoTrackAdapterSettings settings_nonscaled(kNaturalSize, kFrameRate);
  ConfigureTrack(settings_nonscaled);
  auto check_nonscaled = [&](scoped_refptr<media::VideoFrame> frame,
                             base::TimeTicks estimated_capture_time) {
    // We should get the original frame as-is here.
    EXPECT_EQ(frame->storage_type(),
              media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
    EXPECT_EQ(frame->GetGpuMemoryBuffer(), gmb_frame->GetGpuMemoryBuffer());
    EXPECT_EQ(frame->coded_size(), kCodedSize);
    EXPECT_EQ(frame->visible_rect(), kVisibleRect);
    EXPECT_EQ(frame->natural_size(), kNaturalSize);
  };
  SetFrameValidationCallback(base::BindLambdaForTesting(check_nonscaled));
  DeliverAndValidateFrame(gmb_frame, base::TimeTicks());

  // Scale the original frame by a factor of 0.5x.
  const gfx::Size kDesiredSize(640, 360);
  VideoTrackAdapterSettings settings_scaled(kDesiredSize, kFrameRate);
  ConfigureTrack(settings_scaled);
  auto check_scaled = [&](scoped_refptr<media::VideoFrame> frame,
                          base::TimeTicks estimated_capture_time) {
    // The original frame should be wrapped in a new frame, with |kDesiredSize|
    // exposed as natural size of the wrapped frame.
    EXPECT_EQ(frame->storage_type(),
              media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
    EXPECT_EQ(frame->GetGpuMemoryBuffer(), gmb_frame->GetGpuMemoryBuffer());
    EXPECT_EQ(frame->coded_size(), kCodedSize);
    EXPECT_EQ(frame->visible_rect(), kVisibleRect);
    EXPECT_EQ(frame->natural_size(), kDesiredSize);
  };
  SetFrameValidationCallback(base::BindLambdaForTesting(check_scaled));
  DeliverAndValidateFrame(gmb_frame, base::TimeTicks());
}
} // namespace blink
......@@ -1618,6 +1618,8 @@ jumbo_static_library("test_support") {
"testing/unit_test_helpers.h",
"testing/url_test_helpers.cc",
"testing/url_test_helpers.h",
"testing/video_frame_utils.cc",
"testing/video_frame_utils.h",
"testing/viewport_layers_setup.cc",
"testing/viewport_layers_setup.h",
"testing/weburl_loader_mock.cc",
......@@ -1861,6 +1863,7 @@ jumbo_source_set("blink_platform_unittests_sources") {
"weborigin/scheme_registry_test.cc",
"weborigin/security_origin_test.cc",
"weborigin/security_policy_test.cc",
"webrtc/webrtc_video_frame_adapter_test.cc",
]
if (is_win) {
......
......@@ -30,5 +30,6 @@ specific_include_rules = {
"+gpu/command_buffer/common/mailbox.h",
"+media/video/mock_gpu_video_accelerator_factories.h",
"+media/video/mock_video_encode_accelerator.h",
"+third_party/blink/renderer/platform/testing/video_frame_utils.h",
],
}
......@@ -69,6 +69,8 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
if (!(frame->IsMappable() &&
(frame->format() == media::PIXEL_FORMAT_I420 ||
frame->format() == media::PIXEL_FORMAT_I420A)) &&
!(frame->storage_type() ==
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) &&
!frame->HasTextures()) {
// Since connecting sources and sinks do not check the format, we need to
// just ignore formats that we can not handle.
......@@ -123,9 +125,11 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
timestamp_aligner_.TranslateTimestamp(frame->timestamp().InMicroseconds(),
now_us);
// Return |frame| directly if it is texture backed, because there is no
// cropping support for texture yet. See http://crbug/503653.
if (frame->HasTextures()) {
// Return |frame| directly if it is texture not backed up by GPU memory,
// because there is no cropping support for texture yet. See
// http://crbug/503653.
if (frame->HasTextures() &&
frame->storage_type() != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// The webrtc::VideoFrame::UpdateRect expected by WebRTC must
// be relative to the |visible_rect()|. We need to translate.
const auto cropped_rect =
......@@ -177,6 +181,20 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
return;
}
// Delay scaling if |video_frame| is backed by GpuMemoryBuffer.
if (video_frame->storage_type() ==
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// When scaling is applied and any part of the frame has changed, we mark
// the whole frame as changed.
const auto update_rect_on_scaled =
accumulated_update_rect_.IsEmpty()
? gfx::Rect()
: gfx::Rect(video_frame->natural_size());
DeliverFrame(std::move(video_frame), update_rect_on_scaled,
translated_camera_time_us);
return;
}
// Since scaling is required, hard-apply both the cropping and scaling before
// we hand the frame over to WebRTC.
const bool has_alpha = video_frame->format() == media::PIXEL_FORMAT_I420A;
......@@ -245,10 +263,17 @@ void WebRtcVideoTrackSource::DeliverFrame(
// If the cropping or the size have changed since the previous
// frame, even if nothing in the incoming coded frame content has changed, we
// have to assume that every pixel in the outgoing frame has changed.
if (frame->visible_rect() != cropping_rect_of_previous_delivered_frame_) {
if (frame->visible_rect() != cropping_rect_of_previous_delivered_frame_ ||
frame->natural_size() != natural_size_of_previous_delivered_frame_) {
cropping_rect_of_previous_delivered_frame_ = frame->visible_rect();
update_rect = gfx::Rect(0, 0, frame->visible_rect().width(),
frame->visible_rect().height());
natural_size_of_previous_delivered_frame_ = frame->natural_size();
if (frame->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// Use the frame natural size since we delay the scaling.
update_rect = gfx::Rect(frame->natural_size());
} else {
update_rect = gfx::Rect(0, 0, frame->visible_rect().width(),
frame->visible_rect().height());
}
}
// Clear accumulated_update_rect_.
......
......@@ -4,12 +4,14 @@
#include <algorithm>
#include "base/bind_helpers.h"
#include "base/single_thread_task_runner.h"
#include "base/test/task_environment.h"
#include "media/base/video_frame.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/modules/peerconnection/webrtc_video_track_source.h"
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
#include "third_party/webrtc/api/video/video_frame.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
......@@ -32,7 +34,8 @@ class MockVideoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
MOCK_METHOD1(OnFrame, void(const webrtc::VideoFrame&));
};
class WebRtcVideoTrackSourceTest : public ::testing::Test {
class WebRtcVideoTrackSourceTest
: public ::testing::TestWithParam<media::VideoFrame::StorageType> {
public:
WebRtcVideoTrackSourceTest()
: track_source_(new rtc::RefCountedObject<WebRtcVideoTrackSource>(
......@@ -46,21 +49,22 @@ class WebRtcVideoTrackSourceTest : public ::testing::Test {
void SendTestFrame(const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size) {
scoped_refptr<media::VideoFrame> frame = media::VideoFrame::CreateFrame(
media::PIXEL_FORMAT_I420, coded_size, visible_rect, natural_size,
base::TimeDelta());
const gfx::Size& natural_size,
media::VideoFrame::StorageType storage_type) {
scoped_refptr<media::VideoFrame> frame =
CreateTestFrame(coded_size, visible_rect, natural_size, storage_type);
track_source_->OnFrameCaptured(frame);
}
void SendTestFrameWithUpdateRect(const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
int capture_counter,
const gfx::Rect& update_rect) {
scoped_refptr<media::VideoFrame> frame = media::VideoFrame::CreateFrame(
media::PIXEL_FORMAT_I420, coded_size, visible_rect, natural_size,
base::TimeDelta());
void SendTestFrameWithUpdateRect(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
int capture_counter,
const gfx::Rect& update_rect,
media::VideoFrame::StorageType storage_type) {
scoped_refptr<media::VideoFrame> frame =
CreateTestFrame(coded_size, visible_rect, natural_size, storage_type);
frame->metadata()->SetInteger(media::VideoFrameMetadata::CAPTURE_COUNTER,
capture_counter);
frame->metadata()->SetRect(media::VideoFrameMetadata::CAPTURE_UPDATE_RECT,
......@@ -112,10 +116,11 @@ class WebRtcVideoTrackSourceTest : public ::testing::Test {
scoped_refptr<WebRtcVideoTrackSource> track_source_;
};
TEST_F(WebRtcVideoTrackSourceTest, CropFrameTo640360) {
TEST_P(WebRtcVideoTrackSourceTest, CropFrameTo640360) {
const gfx::Size kCodedSize(640, 480);
const gfx::Rect kVisibleRect(0, 60, 640, 360);
const gfx::Size kNaturalSize(640, 360);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_KeepAsIs(kNaturalSize));
......@@ -124,13 +129,14 @@ TEST_F(WebRtcVideoTrackSourceTest, CropFrameTo640360) {
EXPECT_EQ(kNaturalSize.width(), frame.width());
EXPECT_EQ(kNaturalSize.height(), frame.height());
}));
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize);
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize, storage_type);
}
TEST_F(WebRtcVideoTrackSourceTest, CropFrameTo320320) {
TEST_P(WebRtcVideoTrackSourceTest, CropFrameTo320320) {
const gfx::Size kCodedSize(640, 480);
const gfx::Rect kVisibleRect(80, 0, 480, 480);
const gfx::Size kNaturalSize(320, 320);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_KeepAsIs(kNaturalSize));
......@@ -139,13 +145,14 @@ TEST_F(WebRtcVideoTrackSourceTest, CropFrameTo320320) {
EXPECT_EQ(kNaturalSize.width(), frame.width());
EXPECT_EQ(kNaturalSize.height(), frame.height());
}));
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize);
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize, storage_type);
}
TEST_F(WebRtcVideoTrackSourceTest, Scale720To640360) {
TEST_P(WebRtcVideoTrackSourceTest, Scale720To640360) {
const gfx::Size kCodedSize(1280, 720);
const gfx::Rect kVisibleRect(0, 0, 1280, 720);
const gfx::Size kNaturalSize(640, 360);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_KeepAsIs(kNaturalSize));
......@@ -154,13 +161,14 @@ TEST_F(WebRtcVideoTrackSourceTest, Scale720To640360) {
EXPECT_EQ(kNaturalSize.width(), frame.width());
EXPECT_EQ(kNaturalSize.height(), frame.height());
}));
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize);
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize, storage_type);
}
TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
TEST_P(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
const gfx::Size kCodedSize(640, 480);
const gfx::Rect kVisibleRect(0, 0, 640, 480);
const gfx::Size kNaturalSize(640, 480);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_KeepAsIs(kNaturalSize));
......@@ -174,7 +182,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
}));
int capture_counter = 101; // arbitrary absolute value
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
capture_counter, kUpdateRect1);
capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Update rect for second frame should get passed along.
......@@ -183,7 +191,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
ExpectUpdateRectEquals(kUpdateRect1, frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect1);
++capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Simulate the next frame getting dropped
......@@ -192,7 +200,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
const gfx::Rect kUpdateRect2(2, 3, 4, 5);
EXPECT_CALL(mock_sink_, OnFrame(_)).Times(0);
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect2);
++capture_counter, kUpdateRect2, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// The |update_rect| for the next frame is expected to contain the union
......@@ -208,7 +216,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
ExpectUpdateRectEquals(expected_update_rect, frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect3);
++capture_counter, kUpdateRect3, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Simulate a gap in |capture_counter|. This is expected to cause the whole
......@@ -220,7 +228,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
ExpectUpdateRectEquals(kVisibleRect, frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect4);
++capture_counter, kUpdateRect4, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Important edge case (expected to be fairly common): An empty update rect
......@@ -231,7 +239,8 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
EXPECT_TRUE(frame.update_rect().IsEmpty());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kEmptyRectWithZeroOrigin);
++capture_counter, kEmptyRectWithZeroOrigin,
storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
const gfx::Rect kEmptyRectWithNonZeroOrigin(10, 20, 0, 0);
......@@ -240,7 +249,8 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
EXPECT_TRUE(frame.update_rect().IsEmpty());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kEmptyRectWithNonZeroOrigin);
++capture_counter, kEmptyRectWithNonZeroOrigin,
storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// A frame without a CAPTURE_COUNTER and CAPTURE_UPDATE_RECT is treated as the
......@@ -249,14 +259,15 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithNoTransform) {
.WillOnce(Invoke([kVisibleRect](const webrtc::VideoFrame& frame) {
ExpectUpdateRectEquals(kVisibleRect, frame.update_rect());
}));
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize);
SendTestFrame(kCodedSize, kVisibleRect, kNaturalSize, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
}
TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
TEST_P(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
const gfx::Size kCodedSize(640, 480);
const gfx::Rect kVisibleRect(100, 50, 200, 80);
const gfx::Size kNaturalSize = gfx::Size(200, 80);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_KeepAsIs(kNaturalSize));
......@@ -270,7 +281,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
}));
int capture_counter = 101; // arbitrary absolute value
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
capture_counter, kUpdateRect1);
capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Update rect for second frame should get passed along.
......@@ -283,7 +294,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
ExpectUpdateRectEquals(expected_update_rect, frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect1);
++capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Update rect outside crop region.
......@@ -293,7 +304,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
EXPECT_TRUE(frame.update_rect().IsEmpty());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect2);
++capture_counter, kUpdateRect2, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// Update rect partly overlapping crop region.
......@@ -306,7 +317,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect3);
++capture_counter, kUpdateRect3, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// When crop origin changes, the whole frame is expected to be marked as
......@@ -319,7 +330,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect2, kNaturalSize,
++capture_counter, kUpdateRect1);
++capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// When crop size changes, the whole frame is expected to be marked as
......@@ -333,15 +344,16 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithCropFromUpstream) {
frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect3, kNaturalSize,
++capture_counter, kUpdateRect1);
++capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
}
TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
TEST_P(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
const gfx::Size kCodedSize(640, 480);
const gfx::Rect kVisibleRect(100, 50, 200, 80);
const gfx::Size kNaturalSize = gfx::Size(200, 80);
const gfx::Size kScaleToSize = gfx::Size(120, 50);
const media::VideoFrame::StorageType storage_type = GetParam();
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_Scale(kNaturalSize, kScaleToSize));
......@@ -355,7 +367,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
}));
int capture_counter = 101; // arbitrary absolute value
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
capture_counter, kUpdateRect1);
capture_counter, kUpdateRect1, storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
// When scaling is applied and UPDATE_RECT is not empty, we always expect a
......@@ -366,7 +378,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
frame.update_rect());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, kUpdateRect1);
++capture_counter, kUpdateRect1, storage_type);
// When UPDATE_RECT is empty, we expect to deliver an empty UpdateRect even if
// scaling is applied.
......@@ -375,7 +387,7 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
EXPECT_TRUE(frame.update_rect().IsEmpty());
}));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, gfx::Rect());
++capture_counter, gfx::Rect(), storage_type);
// When UPDATE_RECT is empty, but the scaling has changed, we expect to
// deliver an full UpdateRect.
......@@ -388,9 +400,15 @@ TEST_F(WebRtcVideoTrackSourceTest, UpdateRectWithScaling) {
track_source_->SetCustomFrameAdaptationParamsForTesting(
FrameAdaptation_Scale(kNaturalSize, kScaleToSize2));
SendTestFrameWithUpdateRect(kCodedSize, kVisibleRect, kNaturalSize,
++capture_counter, gfx::Rect());
++capture_counter, gfx::Rect(), storage_type);
Mock::VerifyAndClearExpectations(&mock_sink_);
}
INSTANTIATE_TEST_SUITE_P(
WebRtcVideoTrackSourceTest,
WebRtcVideoTrackSourceTest,
testing::Values(media::VideoFrame::STORAGE_OWNED_MEMORY,
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER));
} // namespace blink
......@@ -50,4 +50,10 @@ specific_include_rules = {
'testing_platform_support_with_mock_scheduler\.cc': [
"+base/task/sequence_manager/test/sequence_manager_for_test.h",
],
"video_frame_utils\.cc": [
"+media/video/fake_gpu_memory_buffer.h",
],
"video_frame_utils\.h": [
"+media/base/video_frame.h",
],
}
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
#include "base/bind_helpers.h"
#include "media/video/fake_gpu_memory_buffer.h"
namespace blink {
// Builds a media::VideoFrame for tests with the requested geometry and
// backing storage. Only two storage types are supported:
//  - STORAGE_OWNED_MEMORY: a plain CPU-backed I420 frame.
//  - STORAGE_GPU_MEMORY_BUFFER: an NV12 frame wrapping a FakeGpuMemoryBuffer.
// Any other storage type hits NOTREACHED() and yields a null frame.
scoped_refptr<media::VideoFrame> CreateTestFrame(
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    media::VideoFrame::StorageType storage_type) {
  if (storage_type == media::VideoFrame::STORAGE_OWNED_MEMORY) {
    return media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, coded_size,
                                          visible_rect, natural_size,
                                          base::TimeDelta());
  }
  if (storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
    // The mailboxes stay empty: tests only exercise the CPU-visible path of
    // the fake buffer, never the GPU.
    auto gmb = std::make_unique<media::FakeGpuMemoryBuffer>(
        coded_size, gfx::BufferFormat::YUV_420_BIPLANAR);
    const gpu::MailboxHolder empty_mailboxes[media::VideoFrame::kMaxPlanes];
    return media::VideoFrame::WrapExternalGpuMemoryBuffer(
        visible_rect, natural_size, std::move(gmb), empty_mailboxes,
        base::NullCallback(), base::TimeDelta());
  }
  NOTREACHED() << "Unexpected storage type";
  return nullptr;
}
} // namespace blink
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_
#include "media/base/video_frame.h"
namespace blink {
// Creates a media::VideoFrame for use in tests, with the given coded size,
// visible rectangle and natural size. |storage_type| selects the backing:
// STORAGE_OWNED_MEMORY produces an I420 frame in owned memory, while
// STORAGE_GPU_MEMORY_BUFFER produces an NV12 frame backed by a fake
// GpuMemoryBuffer. Other storage types are not supported.
scoped_refptr<media::VideoFrame> CreateTestFrame(
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    media::VideoFrame::StorageType storage_type);
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_
......@@ -6,8 +6,8 @@
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/webrtc/common_video/include/video_frame_buffer.h"
#include "third_party/webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
#include "ui/gfx/gpu_memory_buffer.h"
......@@ -85,19 +85,33 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame(
const media::VideoFrame& source_frame) {
// NV12 is the only supported format.
DCHECK_EQ(source_frame.format(), media::PIXEL_FORMAT_NV12);
DCHECK_EQ(source_frame.storage_type(),
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
gfx::GpuMemoryBuffer* gmb = source_frame.GetGpuMemoryBuffer();
if (!gmb || !gmb->Map()) {
return nullptr;
}
// Crop to the visible rectangle specified in |source_frame|.
const uint8_t* src_y = (reinterpret_cast<const uint8_t*>(gmb->memory(0)) +
source_frame.visible_rect().x() +
(source_frame.visible_rect().y() * gmb->stride(0)));
const uint8_t* src_uv =
(reinterpret_cast<const uint8_t*>(gmb->memory(1)) +
((source_frame.visible_rect().x() / 2) * 2) +
((source_frame.visible_rect().y() / 2) * gmb->stride(1)));
// Convert to I420 and scale to the natural size specified in |source_frame|.
scoped_refptr<media::VideoFrame> i420_frame = media::VideoFrame::CreateFrame(
media::PIXEL_FORMAT_I420, source_frame.coded_size(),
source_frame.visible_rect(), source_frame.natural_size(),
media::PIXEL_FORMAT_I420, source_frame.natural_size(),
gfx::Rect(source_frame.natural_size()), source_frame.natural_size(),
source_frame.timestamp());
i420_frame->metadata()->MergeMetadataFrom(source_frame.metadata());
const auto& i420_planes = i420_frame->layout().planes();
int ret = libyuv::NV12ToI420(
reinterpret_cast<const uint8_t*>(gmb->memory(0)), gmb->stride(0),
reinterpret_cast<const uint8_t*>(gmb->memory(1)), gmb->stride(1),
webrtc::NV12ToI420Scaler scaler;
scaler.NV12ToI420Scale(
src_y, gmb->stride(0), src_uv, gmb->stride(1),
source_frame.visible_rect().width(), source_frame.visible_rect().height(),
i420_frame->data(media::VideoFrame::kYPlane),
i420_planes[media::VideoFrame::kYPlane].stride,
i420_frame->data(media::VideoFrame::kUPlane),
......@@ -105,10 +119,9 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame(
i420_frame->data(media::VideoFrame::kVPlane),
i420_planes[media::VideoFrame::kVPlane].stride,
i420_frame->coded_size().width(), i420_frame->coded_size().height());
gmb->Unmap();
if (ret) {
return nullptr;
}
return i420_frame;
}
......@@ -127,10 +140,16 @@ webrtc::VideoFrameBuffer::Type WebRtcVideoFrameAdapter::type() const {
}
// Reported width of the adapted frame. GpuMemoryBuffer-backed frames
// soft-apply scaling, so they report the natural (post-scale) width; all
// other frames report the visible-rect width.
int WebRtcVideoFrameAdapter::width() const {
  const bool gmb_backed =
      frame_->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER;
  return gmb_backed ? frame_->natural_size().width()
                    : frame_->visible_rect().width();
}
// Reported height of the adapted frame; mirrors width(): natural size for
// GpuMemoryBuffer-backed frames, visible-rect size otherwise.
int WebRtcVideoFrameAdapter::height() const {
  const bool gmb_backed =
      frame_->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER;
  return gmb_backed ? frame_->natural_size().height()
                    : frame_->visible_rect().height();
}
......@@ -142,8 +161,8 @@ WebRtcVideoFrameAdapter::CreateFrameAdapter() const {
if (!i420_frame) {
return new rtc::RefCountedObject<
FrameAdapter<webrtc::I420BufferInterface>>(
media::VideoFrame::CreateColorFrame(frame_->visible_rect().size(), 0u,
0x80, 0x80, frame_->timestamp()));
media::VideoFrame::CreateColorFrame(frame_->natural_size(), 0u, 0x80,
0x80, frame_->timestamp()));
}
// Keep |frame_| alive until |i420_frame| is destroyed.
......
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_frame_adapter.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
#include "third_party/webrtc/api/video/video_frame_buffer.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace blink {
// WebRtcVideoFrameAdapter must report visible-rect dimensions for frames
// backed by owned memory, and natural-size dimensions for frames backed by a
// GpuMemoryBuffer (where cropping/scaling is soft-applied).
TEST(WebRtcVideoFrameAdapterTest, WidthAndHeight) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);

  // Owned-memory frame: dimensions come from the visible rectangle.
  auto software_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_OWNED_MEMORY);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> software_adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
          std::move(software_frame)));
  EXPECT_EQ(software_adapter->width(), kVisibleRect.width());
  EXPECT_EQ(software_adapter->height(), kVisibleRect.height());

  // GpuMemoryBuffer-backed frame: dimensions come from the natural size.
  auto gpu_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> gpu_adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(std::move(gpu_frame)));
  EXPECT_EQ(gpu_adapter->width(), kNaturalSize.width());
  EXPECT_EQ(gpu_adapter->height(), kNaturalSize.height());
}
// Converting a GpuMemoryBuffer-backed adapter to I420 should produce a frame
// whose dimensions match the natural size, i.e. the soft-applied
// cropping/scaling is hard-applied during the conversion.
TEST(WebRtcVideoFrameAdapterTest, ToI420DownScale) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);
  auto gmb_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
  // The adapter should report width and height from the natural size for
  // VideoFrame backed by GpuMemoryBuffer.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> gmb_frame_adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(std::move(gmb_frame)));
  EXPECT_EQ(gmb_frame_adapter->width(), kNaturalSize.width());
  EXPECT_EQ(gmb_frame_adapter->height(), kNaturalSize.height());
  // The I420 frame should have the same size as the natural size.
  auto i420_frame = gmb_frame_adapter->ToI420();
  EXPECT_EQ(i420_frame->width(), kNaturalSize.width());
  EXPECT_EQ(i420_frame->height(), kNaturalSize.height());
}
} // namespace blink
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment