Commit 4653e564, authored by Ricky Liang, committed by Commit Bot

Video capture with GpuMemoryBuffer - Video frame resolution adaptation

This CL allows Chrome and WebRTC to apply resolution adaptation with
GpuMemoryBuffer-based video frame.

GMB-based video frame soft-applies cropping and scaling by specifying
visible_rectangle and natural_size in the early stage of the processing
pipeline.  The actual cropping and scaling operations happen only when
either the frame is processed on the hardware (e.g. running hardware
encoder on GPU), or when the CPU really needs to access the frame data
(e.g. running software encoder).

Bug: 982201
Change-Id: Ib8f8278603188782f0657920a2140b71d4a4fc00
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1817861
Reviewed-by: Henrik Boström <hbos@chromium.org>
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Commit-Queue: Ricky Liang <jcliang@chromium.org>
Cr-Commit-Position: refs/heads/master@{#702284}
parent 82f6313b
...@@ -78,6 +78,7 @@ class BLINK_PLATFORM_EXPORT WebRtcVideoTrackSource ...@@ -78,6 +78,7 @@ class BLINK_PLATFORM_EXPORT WebRtcVideoTrackSource
gfx::Rect accumulated_update_rect_; gfx::Rect accumulated_update_rect_;
base::Optional<int> previous_capture_counter_; base::Optional<int> previous_capture_counter_;
gfx::Rect cropping_rect_of_previous_delivered_frame_; gfx::Rect cropping_rect_of_previous_delivered_frame_;
gfx::Size natural_size_of_previous_delivered_frame_;
absl::optional<FrameAdaptationParams> absl::optional<FrameAdaptationParams>
custom_frame_adaptation_params_for_testing_; custom_frame_adaptation_params_for_testing_;
......
...@@ -61,5 +61,8 @@ specific_include_rules = { ...@@ -61,5 +61,8 @@ specific_include_rules = {
"+base/files/scoped_temp_dir.h", "+base/files/scoped_temp_dir.h",
"+base/memory/aligned_memory.h", "+base/memory/aligned_memory.h",
"+base/path_service.h", "+base/path_service.h",
"+base/test/bind_test_util.h",
"+base/threading/thread.h",
"+third_party/blink/renderer/platform/testing/video_frame_utils.h",
], ],
} }
...@@ -312,9 +312,15 @@ void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame( ...@@ -312,9 +312,15 @@ void VideoTrackAdapter::VideoFrameResolutionAdapter::DeliverFrame(
return; return;
} }
// TODO(perkj): Allow cropping / scaling of textures once // If the frame is a texture not backed up by GPU memory we don't apply
// https://crbug/362521 is fixed. // cropping/scaling and deliver the frame as-is, leaving it up to the
if (frame->HasTextures()) { // destination to rescale it. Otherwise, cropping and scaling is soft-applied
// before delivery for efficiency.
//
// TODO(crbug.com/362521): Allow cropping/scaling of non-GPU memory backed
// textures.
if (frame->HasTextures() &&
frame->storage_type() != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
DoDeliverFrame(std::move(frame), estimated_capture_time); DoDeliverFrame(std::move(frame), estimated_capture_time);
return; return;
} }
......
...@@ -6,9 +6,15 @@ ...@@ -6,9 +6,15 @@
#include <limits> #include <limits>
#include "base/synchronization/waitable_event.h"
#include "base/test/bind_test_util.h"
#include "base/threading/thread.h"
#include "media/base/limits.h" #include "media/base/limits.h"
#include "testing/gtest/include/gtest/gtest.h" #include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/web/modules/mediastream/mock_media_stream_video_source.h"
#include "third_party/blink/public/web/modules/mediastream/video_track_adapter_settings.h" #include "third_party/blink/public/web/modules/mediastream/video_track_adapter_settings.h"
#include "third_party/blink/renderer/platform/testing/io_task_runner_testing_platform_support.h"
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
namespace blink { namespace blink {
...@@ -189,4 +195,158 @@ TEST(VideoTrackAdapterTest, NoRescaling) { ...@@ -189,4 +195,158 @@ TEST(VideoTrackAdapterTest, NoRescaling) {
EXPECT_EQ(desired_size.height(), kInputHeight); EXPECT_EQ(desired_size.height(), kInputHeight);
} }
// Fixture that exercises VideoTrackAdapter with realistic threading: the
// adapter is created and (re)configured on |testing_render_thread_| while
// frames are delivered on the IO task runner supplied by |platform_support_|,
// mirroring how the production code splits this work across threads.
class VideoTrackAdapterFixtureTest : public ::testing::Test {
 public:
  VideoTrackAdapterFixtureTest()
      : testing_render_thread_("TestingRenderThread"),
        frame_received_(base::WaitableEvent::ResetPolicy::MANUAL,
                        base::WaitableEvent::InitialState::NOT_SIGNALED) {}
  ~VideoTrackAdapterFixtureTest() override = default;

 protected:
  void SetUp() override { testing_render_thread_.Start(); }

  void TearDown() override {
    // Remove the track on the same thread it was added on, before that thread
    // is stopped, so the adapter does not keep a stale track pointer.
    if (track_added_) {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE, base::BindOnce(&VideoTrackAdapter::RemoveTrack, adapter_,
                                    null_track_.get()));
    }
    testing_render_thread_.Stop();
  }

  // Creates |mock_source_| and |adapter_| for the given |capture_format|.
  // Blocks until the adapter has been constructed so callers can use it
  // immediately after this returns.
  void CreateAdapter(const media::VideoCaptureFormat& capture_format) {
    mock_source_ =
        std::make_unique<MockMediaStreamVideoSource>(capture_format, false);
    // Create the VideoTrackAdapter instance on |testing_render_thread_|.
    base::WaitableEvent adapter_created(
        base::WaitableEvent::ResetPolicy::MANUAL,
        base::WaitableEvent::InitialState::NOT_SIGNALED);
    testing_render_thread_.task_runner()->PostTask(
        FROM_HERE, base::BindLambdaForTesting([&]() {
          adapter_ = base::MakeRefCounted<VideoTrackAdapter>(
              platform_support_->GetIOTaskRunner(), mock_source_->GetWeakPtr());
          adapter_created.Signal();
        }));
    adapter_created.Wait();
  }

  // Create or re-configure the dummy |null_track_| with the given
  // |adapter_settings|. The first call adds the track to the adapter; later
  // calls only reconfigure the already-added track.
  void ConfigureTrack(const VideoTrackAdapterSettings& adapter_settings) {
    if (!track_added_) {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE,
          base::BindOnce(
              &VideoTrackAdapter::AddTrack, adapter_, null_track_.get(),
              base::BindRepeating(
                  &VideoTrackAdapterFixtureTest::OnFrameDelivered,
                  base::Unretained(this)),
              base::DoNothing(), base::DoNothing(), adapter_settings));
      track_added_ = true;
    } else {
      testing_render_thread_.task_runner()->PostTask(
          FROM_HERE,
          base::BindOnce(&VideoTrackAdapter::ReconfigureTrack, adapter_,
                         null_track_.get(), adapter_settings));
    }
  }

  // Registers |callback| to run on each frame that reaches OnFrameDelivered;
  // the callback carries the per-test EXPECT checks.
  void SetFrameValidationCallback(VideoCaptureDeliverFrameCB callback) {
    frame_validation_callback_ = std::move(callback);
  }

  // Deliver |frame| to |adapter_| and wait until OnFrameDelivered signals that
  // it receives the processed frame.
  void DeliverAndValidateFrame(scoped_refptr<media::VideoFrame> frame,
                               base::TimeTicks estimated_capture_time) {
    auto deliver_frame = [&]() {
      platform_support_->GetIOTaskRunner()->PostTask(
          FROM_HERE, base::BindOnce(&VideoTrackAdapter::DeliverFrameOnIO,
                                    adapter_, frame, estimated_capture_time));
    };

    frame_received_.Reset();
    // Bounce the call to DeliverFrameOnIO off |testing_render_thread_| to
    // synchronize with the AddTrackOnIO / ReconfigureTrackOnIO that would be
    // invoked through ConfigureTrack.
    testing_render_thread_.task_runner()->PostTask(
        FROM_HERE, base::BindLambdaForTesting(deliver_frame));
    frame_received_.Wait();
  }

  // Invoked for every frame the adapter delivers; runs the optional
  // validation callback, then unblocks DeliverAndValidateFrame.
  void OnFrameDelivered(scoped_refptr<media::VideoFrame> frame,
                        base::TimeTicks estimated_capture_time) {
    if (frame_validation_callback_) {
      frame_validation_callback_.Run(frame, estimated_capture_time);
    }
    frame_received_.Signal();
  }

  ScopedTestingPlatformSupport<IOTaskRunnerTestingPlatformSupport>
      platform_support_;
  base::Thread testing_render_thread_;
  std::unique_ptr<MockMediaStreamVideoSource> mock_source_;
  scoped_refptr<VideoTrackAdapter> adapter_;
  // Signaled once a delivered frame has completed the round trip.
  base::WaitableEvent frame_received_;
  VideoCaptureDeliverFrameCB frame_validation_callback_;
  // For testing we use a nullptr for MediaStreamVideoTrack.
  std::unique_ptr<MediaStreamVideoTrack> null_track_ = nullptr;
  bool track_added_ = false;
};
// Verifies that GpuMemoryBuffer-backed frames pass through the adapter
// untouched when no adaptation is needed, and that downscaling is
// soft-applied (same GPU buffer, new natural size) when it is.
TEST_F(VideoTrackAdapterFixtureTest, DeliverFrame_GpuMemoryBuffer) {
  // Geometry of the source frame fed into the adapter.
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(1280, 720);
  const double kFrameRate = 30.0;
  auto gpu_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);

  // NV12 is the only pixel format supported for GpuMemoryBuffer at the
  // moment, so configure the stream accordingly.
  const media::VideoCaptureFormat stream_format(kCodedSize, kFrameRate,
                                                media::PIXEL_FORMAT_NV12);
  CreateAdapter(stream_format);

  // First pass: the requested size matches the frame's natural size, so the
  // adapter should hand the original frame through unchanged.
  VideoTrackAdapterSettings settings_nonscaled(kNaturalSize, kFrameRate);
  ConfigureTrack(settings_nonscaled);
  auto validate_passthrough = [&](scoped_refptr<media::VideoFrame> frame,
                                  base::TimeTicks estimated_capture_time) {
    EXPECT_EQ(frame->storage_type(),
              media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
    EXPECT_EQ(frame->GetGpuMemoryBuffer(), gpu_frame->GetGpuMemoryBuffer());
    EXPECT_EQ(frame->coded_size(), kCodedSize);
    EXPECT_EQ(frame->visible_rect(), kVisibleRect);
    EXPECT_EQ(frame->natural_size(), kNaturalSize);
  };
  SetFrameValidationCallback(base::BindLambdaForTesting(validate_passthrough));
  DeliverAndValidateFrame(gpu_frame, base::TimeTicks());

  // Second pass: request half the resolution. The same GPU buffer should be
  // wrapped in a new frame whose natural size carries the scaling request.
  const gfx::Size kDesiredSize(640, 360);
  VideoTrackAdapterSettings settings_scaled(kDesiredSize, kFrameRate);
  ConfigureTrack(settings_scaled);
  auto validate_downscaled = [&](scoped_refptr<media::VideoFrame> frame,
                                 base::TimeTicks estimated_capture_time) {
    EXPECT_EQ(frame->storage_type(),
              media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
    EXPECT_EQ(frame->GetGpuMemoryBuffer(), gpu_frame->GetGpuMemoryBuffer());
    EXPECT_EQ(frame->coded_size(), kCodedSize);
    EXPECT_EQ(frame->visible_rect(), kVisibleRect);
    EXPECT_EQ(frame->natural_size(), kDesiredSize);
  };
  SetFrameValidationCallback(base::BindLambdaForTesting(validate_downscaled));
  DeliverAndValidateFrame(gpu_frame, base::TimeTicks());
}
} // namespace blink } // namespace blink
...@@ -1618,6 +1618,8 @@ jumbo_static_library("test_support") { ...@@ -1618,6 +1618,8 @@ jumbo_static_library("test_support") {
"testing/unit_test_helpers.h", "testing/unit_test_helpers.h",
"testing/url_test_helpers.cc", "testing/url_test_helpers.cc",
"testing/url_test_helpers.h", "testing/url_test_helpers.h",
"testing/video_frame_utils.cc",
"testing/video_frame_utils.h",
"testing/viewport_layers_setup.cc", "testing/viewport_layers_setup.cc",
"testing/viewport_layers_setup.h", "testing/viewport_layers_setup.h",
"testing/weburl_loader_mock.cc", "testing/weburl_loader_mock.cc",
...@@ -1861,6 +1863,7 @@ jumbo_source_set("blink_platform_unittests_sources") { ...@@ -1861,6 +1863,7 @@ jumbo_source_set("blink_platform_unittests_sources") {
"weborigin/scheme_registry_test.cc", "weborigin/scheme_registry_test.cc",
"weborigin/security_origin_test.cc", "weborigin/security_origin_test.cc",
"weborigin/security_policy_test.cc", "weborigin/security_policy_test.cc",
"webrtc/webrtc_video_frame_adapter_test.cc",
] ]
if (is_win) { if (is_win) {
......
...@@ -30,5 +30,6 @@ specific_include_rules = { ...@@ -30,5 +30,6 @@ specific_include_rules = {
"+gpu/command_buffer/common/mailbox.h", "+gpu/command_buffer/common/mailbox.h",
"+media/video/mock_gpu_video_accelerator_factories.h", "+media/video/mock_gpu_video_accelerator_factories.h",
"+media/video/mock_video_encode_accelerator.h", "+media/video/mock_video_encode_accelerator.h",
"+third_party/blink/renderer/platform/testing/video_frame_utils.h",
], ],
} }
...@@ -69,6 +69,8 @@ void WebRtcVideoTrackSource::OnFrameCaptured( ...@@ -69,6 +69,8 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
if (!(frame->IsMappable() && if (!(frame->IsMappable() &&
(frame->format() == media::PIXEL_FORMAT_I420 || (frame->format() == media::PIXEL_FORMAT_I420 ||
frame->format() == media::PIXEL_FORMAT_I420A)) && frame->format() == media::PIXEL_FORMAT_I420A)) &&
!(frame->storage_type() ==
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) &&
!frame->HasTextures()) { !frame->HasTextures()) {
// Since connecting sources and sinks do not check the format, we need to // Since connecting sources and sinks do not check the format, we need to
// just ignore formats that we can not handle. // just ignore formats that we can not handle.
...@@ -123,9 +125,11 @@ void WebRtcVideoTrackSource::OnFrameCaptured( ...@@ -123,9 +125,11 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
timestamp_aligner_.TranslateTimestamp(frame->timestamp().InMicroseconds(), timestamp_aligner_.TranslateTimestamp(frame->timestamp().InMicroseconds(),
now_us); now_us);
// Return |frame| directly if it is texture backed, because there is no // Return |frame| directly if it is texture not backed up by GPU memory,
// cropping support for texture yet. See http://crbug/503653. // because there is no cropping support for texture yet. See
if (frame->HasTextures()) { // http://crbug/503653.
if (frame->HasTextures() &&
frame->storage_type() != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// The webrtc::VideoFrame::UpdateRect expected by WebRTC must // The webrtc::VideoFrame::UpdateRect expected by WebRTC must
// be relative to the |visible_rect()|. We need to translate. // be relative to the |visible_rect()|. We need to translate.
const auto cropped_rect = const auto cropped_rect =
...@@ -177,6 +181,20 @@ void WebRtcVideoTrackSource::OnFrameCaptured( ...@@ -177,6 +181,20 @@ void WebRtcVideoTrackSource::OnFrameCaptured(
return; return;
} }
// Delay scaling if |video_frame| is backed by GpuMemoryBuffer.
if (video_frame->storage_type() ==
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// When scaling is applied and any part of the frame has changed, we mark
// the whole frame as changed.
const auto update_rect_on_scaled =
accumulated_update_rect_.IsEmpty()
? gfx::Rect()
: gfx::Rect(video_frame->natural_size());
DeliverFrame(std::move(video_frame), update_rect_on_scaled,
translated_camera_time_us);
return;
}
// Since scaling is required, hard-apply both the cropping and scaling before // Since scaling is required, hard-apply both the cropping and scaling before
// we hand the frame over to WebRTC. // we hand the frame over to WebRTC.
const bool has_alpha = video_frame->format() == media::PIXEL_FORMAT_I420A; const bool has_alpha = video_frame->format() == media::PIXEL_FORMAT_I420A;
...@@ -245,10 +263,17 @@ void WebRtcVideoTrackSource::DeliverFrame( ...@@ -245,10 +263,17 @@ void WebRtcVideoTrackSource::DeliverFrame(
// If the cropping or the size have changed since the previous // If the cropping or the size have changed since the previous
// frame, even if nothing in the incoming coded frame content has changed, we // frame, even if nothing in the incoming coded frame content has changed, we
// have to assume that every pixel in the outgoing frame has changed. // have to assume that every pixel in the outgoing frame has changed.
if (frame->visible_rect() != cropping_rect_of_previous_delivered_frame_) { if (frame->visible_rect() != cropping_rect_of_previous_delivered_frame_ ||
frame->natural_size() != natural_size_of_previous_delivered_frame_) {
cropping_rect_of_previous_delivered_frame_ = frame->visible_rect(); cropping_rect_of_previous_delivered_frame_ = frame->visible_rect();
update_rect = gfx::Rect(0, 0, frame->visible_rect().width(), natural_size_of_previous_delivered_frame_ = frame->natural_size();
frame->visible_rect().height()); if (frame->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
// Use the frame natural size since we delay the scaling.
update_rect = gfx::Rect(frame->natural_size());
} else {
update_rect = gfx::Rect(0, 0, frame->visible_rect().width(),
frame->visible_rect().height());
}
} }
// Clear accumulated_update_rect_. // Clear accumulated_update_rect_.
......
...@@ -50,4 +50,10 @@ specific_include_rules = { ...@@ -50,4 +50,10 @@ specific_include_rules = {
'testing_platform_support_with_mock_scheduler\.cc': [ 'testing_platform_support_with_mock_scheduler\.cc': [
"+base/task/sequence_manager/test/sequence_manager_for_test.h", "+base/task/sequence_manager/test/sequence_manager_for_test.h",
], ],
"video_frame_utils\.cc": [
"+media/video/fake_gpu_memory_buffer.h",
],
"video_frame_utils\.h": [
"+media/base/video_frame.h",
],
} }
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
#include "base/bind_helpers.h"
#include "media/video/fake_gpu_memory_buffer.h"
namespace blink {
// Builds a media::VideoFrame with the requested geometry whose backing store
// matches |storage_type|. Supported types:
//  - STORAGE_OWNED_MEMORY: a CPU-side I420 allocation.
//  - STORAGE_GPU_MEMORY_BUFFER: wraps a FakeGpuMemoryBuffer in
//    YUV_420_BIPLANAR (NV12) format with empty mailboxes.
// Any other storage type trips NOTREACHED() and yields a null frame.
scoped_refptr<media::VideoFrame> CreateTestFrame(
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    media::VideoFrame::StorageType storage_type) {
  if (storage_type == media::VideoFrame::STORAGE_OWNED_MEMORY) {
    return media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, coded_size,
                                          visible_rect, natural_size,
                                          base::TimeDelta());
  }

  if (storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
    // No real GPU process is involved, so the mailbox holders stay empty.
    auto fake_buffer = std::make_unique<media::FakeGpuMemoryBuffer>(
        coded_size, gfx::BufferFormat::YUV_420_BIPLANAR);
    const gpu::MailboxHolder no_mailboxes[media::VideoFrame::kMaxPlanes];
    return media::VideoFrame::WrapExternalGpuMemoryBuffer(
        visible_rect, natural_size, std::move(fake_buffer), no_mailboxes,
        base::NullCallback(), base::TimeDelta());
  }

  NOTREACHED() << "Unexpected storage type";
  return nullptr;
}
} // namespace blink
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_

#include "media/base/video_frame.h"

namespace blink {

// Creates a media::VideoFrame for tests with the given coded size, visible
// rectangle and natural size, backed by storage of type |storage_type|.
// Unsupported storage types hit NOTREACHED() and return a null frame; see
// the .cc file for the supported set.
scoped_refptr<media::VideoFrame> CreateTestFrame(
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    media::VideoFrame::StorageType storage_type);

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_PLATFORM_TESTING_VIDEO_FRAME_UTILS_H_
...@@ -6,8 +6,8 @@ ...@@ -6,8 +6,8 @@
#include "base/bind_helpers.h" #include "base/bind_helpers.h"
#include "base/logging.h" #include "base/logging.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/webrtc/common_video/include/video_frame_buffer.h" #include "third_party/webrtc/common_video/include/video_frame_buffer.h"
#include "third_party/webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h" #include "third_party/webrtc/rtc_base/ref_counted_object.h"
#include "ui/gfx/gpu_memory_buffer.h" #include "ui/gfx/gpu_memory_buffer.h"
...@@ -85,19 +85,33 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame( ...@@ -85,19 +85,33 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame(
const media::VideoFrame& source_frame) { const media::VideoFrame& source_frame) {
// NV12 is the only supported format. // NV12 is the only supported format.
DCHECK_EQ(source_frame.format(), media::PIXEL_FORMAT_NV12); DCHECK_EQ(source_frame.format(), media::PIXEL_FORMAT_NV12);
DCHECK_EQ(source_frame.storage_type(),
media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
gfx::GpuMemoryBuffer* gmb = source_frame.GetGpuMemoryBuffer(); gfx::GpuMemoryBuffer* gmb = source_frame.GetGpuMemoryBuffer();
if (!gmb || !gmb->Map()) { if (!gmb || !gmb->Map()) {
return nullptr; return nullptr;
} }
// Crop to the visible rectangle specified in |source_frame|.
const uint8_t* src_y = (reinterpret_cast<const uint8_t*>(gmb->memory(0)) +
source_frame.visible_rect().x() +
(source_frame.visible_rect().y() * gmb->stride(0)));
const uint8_t* src_uv =
(reinterpret_cast<const uint8_t*>(gmb->memory(1)) +
((source_frame.visible_rect().x() / 2) * 2) +
((source_frame.visible_rect().y() / 2) * gmb->stride(1)));
// Convert to I420 and scale to the natural size specified in |source_frame|.
scoped_refptr<media::VideoFrame> i420_frame = media::VideoFrame::CreateFrame( scoped_refptr<media::VideoFrame> i420_frame = media::VideoFrame::CreateFrame(
media::PIXEL_FORMAT_I420, source_frame.coded_size(), media::PIXEL_FORMAT_I420, source_frame.natural_size(),
source_frame.visible_rect(), source_frame.natural_size(), gfx::Rect(source_frame.natural_size()), source_frame.natural_size(),
source_frame.timestamp()); source_frame.timestamp());
i420_frame->metadata()->MergeMetadataFrom(source_frame.metadata()); i420_frame->metadata()->MergeMetadataFrom(source_frame.metadata());
const auto& i420_planes = i420_frame->layout().planes(); const auto& i420_planes = i420_frame->layout().planes();
int ret = libyuv::NV12ToI420( webrtc::NV12ToI420Scaler scaler;
reinterpret_cast<const uint8_t*>(gmb->memory(0)), gmb->stride(0), scaler.NV12ToI420Scale(
reinterpret_cast<const uint8_t*>(gmb->memory(1)), gmb->stride(1), src_y, gmb->stride(0), src_uv, gmb->stride(1),
source_frame.visible_rect().width(), source_frame.visible_rect().height(),
i420_frame->data(media::VideoFrame::kYPlane), i420_frame->data(media::VideoFrame::kYPlane),
i420_planes[media::VideoFrame::kYPlane].stride, i420_planes[media::VideoFrame::kYPlane].stride,
i420_frame->data(media::VideoFrame::kUPlane), i420_frame->data(media::VideoFrame::kUPlane),
...@@ -105,10 +119,9 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame( ...@@ -105,10 +119,9 @@ scoped_refptr<media::VideoFrame> ConstructI420VideoFrame(
i420_frame->data(media::VideoFrame::kVPlane), i420_frame->data(media::VideoFrame::kVPlane),
i420_planes[media::VideoFrame::kVPlane].stride, i420_planes[media::VideoFrame::kVPlane].stride,
i420_frame->coded_size().width(), i420_frame->coded_size().height()); i420_frame->coded_size().width(), i420_frame->coded_size().height());
gmb->Unmap(); gmb->Unmap();
if (ret) {
return nullptr;
}
return i420_frame; return i420_frame;
} }
...@@ -127,10 +140,16 @@ webrtc::VideoFrameBuffer::Type WebRtcVideoFrameAdapter::type() const { ...@@ -127,10 +140,16 @@ webrtc::VideoFrameBuffer::Type WebRtcVideoFrameAdapter::type() const {
} }
int WebRtcVideoFrameAdapter::width() const { int WebRtcVideoFrameAdapter::width() const {
if (frame_->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
return frame_->natural_size().width();
}
return frame_->visible_rect().width(); return frame_->visible_rect().width();
} }
int WebRtcVideoFrameAdapter::height() const { int WebRtcVideoFrameAdapter::height() const {
if (frame_->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
return frame_->natural_size().height();
}
return frame_->visible_rect().height(); return frame_->visible_rect().height();
} }
...@@ -142,8 +161,8 @@ WebRtcVideoFrameAdapter::CreateFrameAdapter() const { ...@@ -142,8 +161,8 @@ WebRtcVideoFrameAdapter::CreateFrameAdapter() const {
if (!i420_frame) { if (!i420_frame) {
return new rtc::RefCountedObject< return new rtc::RefCountedObject<
FrameAdapter<webrtc::I420BufferInterface>>( FrameAdapter<webrtc::I420BufferInterface>>(
media::VideoFrame::CreateColorFrame(frame_->visible_rect().size(), 0u, media::VideoFrame::CreateColorFrame(frame_->natural_size(), 0u, 0x80,
0x80, 0x80, frame_->timestamp())); 0x80, frame_->timestamp()));
} }
// Keep |frame_| alive until |i420_frame| is destroyed. // Keep |frame_| alive until |i420_frame| is destroyed.
......
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/webrtc/webrtc_video_frame_adapter.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/platform/testing/video_frame_utils.h"
#include "third_party/webrtc/api/video/video_frame_buffer.h"
#include "third_party/webrtc/rtc_base/ref_counted_object.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace blink {
// Verifies which size the adapter reports to WebRTC depending on how the
// underlying media::VideoFrame is backed.
TEST(WebRtcVideoFrameAdapterTest, WidthAndHeight) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);

  // A frame in CPU-owned memory is reported at its visible-rect size.
  auto cpu_frame = CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                                   media::VideoFrame::STORAGE_OWNED_MEMORY);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> cpu_frame_adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
          std::move(cpu_frame)));
  EXPECT_EQ(cpu_frame_adapter->width(), kVisibleRect.width());
  EXPECT_EQ(cpu_frame_adapter->height(), kVisibleRect.height());

  // A GpuMemoryBuffer-backed frame is reported at its natural size instead,
  // since cropping/scaling is soft-applied for such frames.
  auto gpu_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> gpu_frame_adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
          std::move(gpu_frame)));
  EXPECT_EQ(gpu_frame_adapter->width(), kNaturalSize.width());
  EXPECT_EQ(gpu_frame_adapter->height(), kNaturalSize.height());
}
// Verifies that converting a GpuMemoryBuffer-backed frame to I420 performs
// the deferred crop and scale, producing a frame at the natural size.
TEST(WebRtcVideoFrameAdapterTest, ToI420DownScale) {
  const gfx::Size kCodedSize(1280, 960);
  const gfx::Rect kVisibleRect(0, 120, 1280, 720);
  const gfx::Size kNaturalSize(640, 360);

  auto gpu_frame =
      CreateTestFrame(kCodedSize, kVisibleRect, kNaturalSize,
                      media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER);

  // GpuMemoryBuffer-backed frames expose their natural size through the
  // adapter because scaling has not been applied yet.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> adapter(
      new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(
          std::move(gpu_frame)));
  EXPECT_EQ(adapter->width(), kNaturalSize.width());
  EXPECT_EQ(adapter->height(), kNaturalSize.height());

  // ToI420 hard-applies the pending crop/scale; the resulting software frame
  // must therefore come out at the natural size as well.
  auto software_frame = adapter->ToI420();
  EXPECT_EQ(software_frame->width(), kNaturalSize.width());
  EXPECT_EQ(software_frame->height(), kNaturalSize.height());
}
} // namespace blink
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment