Commit 282b3c80 authored by Johannes Kron, committed by Commit Bot

Add low-latency renderer algorithm for WebRTC streams

The current low-latency rendering runs without any renderer
algorithm. This gives a very low latency at the cost of
dropping frames in an uncontrolled way. The goal of the
low-latency renderer algorithm is to add a small amount of
latency to reduce the number of dropped frames. We're especially
interested in reducing the number of consecutively dropped frames
since these are more noticeable than a single dropped frame.

The low-latency renderer algorithm is enabled from WebRTC by
setting the metadata field maximum_composition_delay_in_frames.
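
For illustration, a producer that wants the low-latency path only needs to populate this metadata field on the media::VideoFrame it hands to the renderer. A minimal sketch (the constant and the |frame| variable are illustrative, not part of this CL):

// Sketch: tag a decoded frame so that the low-latency renderer algorithm,
// rather than the default algorithm, is selected for it.
constexpr int kMaxCompositionDelayInFrames = 6;  // Illustrative value.
frame->metadata()->maximum_composition_delay_in_frames =
    kMaxCompositionDelayInFrames;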

This is an experimental feature that is active if and only if the
RTP header extension playout-delay is set to min=0ms and max>0ms.

The feature can be completely disabled by specifying the field trial
WebRTC-LowLatencyRenderer/enabled:false/
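
Assuming WebRTC's standard field-trial lookup (webrtc::field_trial::FindFullName), the kill-switch check could look roughly like the sketch below; the actual parsing inside WebRTC may use its field-trial parameter helpers instead:

#include <string>
#include "system_wrappers/include/field_trial.h"

// Sketch: treat the feature as enabled unless the kill-switch trial string
// "WebRTC-LowLatencyRenderer/enabled:false/" was specified.
const bool low_latency_renderer_enabled =
    webrtc::field_trial::FindFullName("WebRTC-LowLatencyRenderer")
        .find("enabled:false") == std::string::npos;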

Bug: 1138888
Change-Id: Ia59db56b115f2cb6e8348e115781f8ad0c33eedb
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2502343
Reviewed-by: Jeremy Roman <jbroman@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Commit-Queue: Johannes Kron <kron@chromium.org>
Cr-Commit-Position: refs/heads/master@{#824646}
parent 95e7230f
......@@ -56,6 +56,7 @@ void VideoFrameMetadata::MergeMetadataFrom(
MERGE_FIELD(rtp_timestamp, metadata_source);
MERGE_FIELD(receive_time, metadata_source);
MERGE_FIELD(wallclock_frame_duration, metadata_source);
MERGE_FIELD(maximum_composition_delay_in_frames, metadata_source);
}
} // namespace media
......@@ -178,6 +178,13 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// expected to spend on the screen during playback. Unlike FRAME_DURATION
// this field takes into account current playback rate.
base::Optional<base::TimeDelta> wallclock_frame_duration;
// WebRTC streams only: if present, this field represents the maximum
// composition delay that is allowed for this frame. This is respected
// on a best-effort basis.
// This is an experimental feature; see crbug.com/1138888 for more
// information.
base::Optional<int> maximum_composition_delay_in_frames;
};
} // namespace media
......
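Since the new field is a base::Optional<int>, consumers read it with a fallback. A small sketch of the pattern (the default value mirrors the one used by the renderer algorithm later in this CL; |frame| is illustrative):

// Sketch: consumer-side read of the optional metadata with a default.
constexpr int kDefaultMaxCompositionDelayInFrames = 10;
const int max_queue_length =
    frame->metadata()->maximum_composition_delay_in_frames.value_or(
        kDefaultMaxCompositionDelayInFrames);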
......@@ -338,6 +338,7 @@ source_set("unit_tests") {
"mediarecorder/track_recorder_unittest.cc",
"mediarecorder/video_track_recorder_unittest.cc",
"mediasession/media_session_test.cc",
"mediastream/low_latency_video_renderer_algorithm_unittest.cc",
"mediastream/media_constraints_test.cc",
"mediastream/media_devices_test.cc",
"mediastream/media_stream_audio_processor_test.cc",
......
......@@ -17,6 +17,8 @@ blink_modules_sources("mediastream") {
"input_device_info.h",
"local_media_stream_audio_source.cc",
"local_media_stream_audio_source.h",
"low_latency_video_renderer_algorithm.cc",
"low_latency_video_renderer_algorithm.h",
"media_constraints_impl.cc",
"media_constraints_impl.h",
"media_device_info.cc",
......@@ -86,6 +88,8 @@ blink_modules_sources("mediastream") {
"user_media_processor.h",
"user_media_request.cc",
"user_media_request.h",
"video_renderer_algorithm_wrapper.cc",
"video_renderer_algorithm_wrapper.h",
"video_track_adapter.cc",
"video_track_adapter.h",
"video_track_adapter_settings.cc",
......
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/modules/mediastream/low_latency_video_renderer_algorithm.h"
#include "media/base/media_log.h"
namespace blink {
LowLatencyVideoRendererAlgorithm::LowLatencyVideoRendererAlgorithm(
media::MediaLog* media_log) {
Reset();
}
LowLatencyVideoRendererAlgorithm::~LowLatencyVideoRendererAlgorithm() = default;
scoped_refptr<media::VideoFrame> LowLatencyVideoRendererAlgorithm::Render(
base::TimeTicks deadline_min,
base::TimeTicks deadline_max,
size_t* frames_dropped) {
DCHECK_LE(deadline_min, deadline_max);
// TODO(crbug.com/1138888): Handle the case where the screen refresh rate and
// the video frame rate are not the same as well as occasional skips of
// rendering intervals.
if (frames_dropped) {
*frames_dropped = 0;
}
if (frame_queue_.size() > 1) {
constexpr size_t kMaxQueueSize = 30;
if (frame_queue_.size() > kMaxQueueSize) {
// The queue has grown too big. Clear all but the last enqueued frame and
// enter normal mode.
if (frames_dropped) {
*frames_dropped += frame_queue_.size() - 1;
}
while (frame_queue_.size() > 1) {
frame_queue_.pop_front();
}
mode_ = Mode::kNormal;
} else {
// There are several frames in the queue; determine if we should enter
// drain mode based on queue length and the maximum composition delay that
// is provided for the last enqueued frame.
constexpr size_t kDefaultMaxCompositionDelayInFrames = 10;
int max_queue_length = frame_queue_.back()
->metadata()
->maximum_composition_delay_in_frames.value_or(
kDefaultMaxCompositionDelayInFrames);
// The number of frames in the queue is in the range [2, kMaxQueueSize]
// due to the conditions that lead up to this point. This means that the
// active range of |max_queue_length| is [1, kMaxQueueSize].
if (max_queue_length < static_cast<int>(frame_queue_.size()))
mode_ = Mode::kDrain;
if (mode_ == Mode::kDrain) {
// Drop one frame if we're in drain mode.
frame_queue_.pop_front();
if (frames_dropped) {
++(*frames_dropped);
}
}
}
} else if (mode_ == Mode::kDrain) {
// At most one frame is left in the queue; exit drain mode.
mode_ = Mode::kNormal;
}
// Reduce steady-state queue length.
// Drop one frame if we have observed 10 consecutive rendered frames where
// there was a newer frame in the queue that could have been selected.
constexpr int kReduceSteadyStateQueueSizeThreshold = 10;
if (mode_ == Mode::kNormal && frame_queue_.size() >= 2) {
if (++consecutive_frames_with_back_up_ >
kReduceSteadyStateQueueSizeThreshold) {
frame_queue_.pop_front();
if (frames_dropped) {
++(*frames_dropped);
}
consecutive_frames_with_back_up_ = 0;
}
} else {
consecutive_frames_with_back_up_ = 0;
}
// Select the first frame in the queue to be rendered.
if (!frame_queue_.empty()) {
current_frame_.swap(frame_queue_.front());
frame_queue_.pop_front();
}
// Update the current render interval for subroutines.
render_interval_ = deadline_max - deadline_min;
return current_frame_;
}
void LowLatencyVideoRendererAlgorithm::Reset() {
render_interval_ = base::TimeDelta();
current_frame_.reset();
frame_queue_.clear();
mode_ = Mode::kNormal;
consecutive_frames_with_back_up_ = 0;
}
void LowLatencyVideoRendererAlgorithm::EnqueueFrame(
scoped_refptr<media::VideoFrame> frame) {
DCHECK(frame);
DCHECK(!frame->metadata()->end_of_stream);
frame_queue_.push_back(std::move(frame));
}
} // namespace blink
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_LOW_LATENCY_VIDEO_RENDERER_ALGORITHM_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_LOW_LATENCY_VIDEO_RENDERER_ALGORITHM_H_
#include <stddef.h>
#include <stdint.h>
#include "base/callback.h"
#include "base/macros.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "third_party/blink/renderer/modules/modules_export.h"
#include "third_party/blink/renderer/platform/wtf/deque.h"
namespace media {
class MediaLog;
}
namespace blink {
class MODULES_EXPORT LowLatencyVideoRendererAlgorithm {
public:
explicit LowLatencyVideoRendererAlgorithm(media::MediaLog* media_log);
LowLatencyVideoRendererAlgorithm(const LowLatencyVideoRendererAlgorithm&) =
delete;
LowLatencyVideoRendererAlgorithm& operator=(
const LowLatencyVideoRendererAlgorithm&) = delete;
~LowLatencyVideoRendererAlgorithm();
// Chooses the best frame for the interval [deadline_min, deadline_max] based
// on available frames in the queue.
//
// If provided, |frames_dropped| will be set to the number of frames that were
// removed from |frame_queue_| during this call, were never returned by a
// previous Render() call, and are no longer suitable for rendering.
scoped_refptr<media::VideoFrame> Render(base::TimeTicks deadline_min,
base::TimeTicks deadline_max,
size_t* frames_dropped);
// Adds a frame to |frame_queue_| for consideration by Render(). Frames are
// rendered in the order they are enqueued. If too many frames are in the
// queue, the algorithm will enter a drain mode where every second frame will
// be dropped.
void EnqueueFrame(scoped_refptr<media::VideoFrame> frame);
// Removes all frames from |frame_queue_|.
void Reset();
// Returns the number of frames in the queue. If a frame is currently being
// rendered, it is included in the count.
size_t frames_queued() const {
return frame_queue_.size() + (current_frame_ ? 1 : 0);
}
// Returns the average frame duration. Currently hard-coded to 60 fps.
base::TimeDelta average_frame_duration() const {
// TODO(crbug.com/1138888): Estimate frame duration from content.
return base::TimeDelta::FromMillisecondsD(1000.0 / 60.0);
}
private:
scoped_refptr<media::VideoFrame> current_frame_;
// Queue of incoming frames waiting for rendering.
using VideoFrameQueue = WTF::Deque<scoped_refptr<media::VideoFrame>>;
VideoFrameQueue frame_queue_;
// The length of the last deadline interval given to Render(), updated at the
// start of Render().
base::TimeDelta render_interval_;
enum class Mode {
// Render frames at their intended rate.
kNormal = 0,
// Render frames at double rate. This mode is used to drop frames in a
// controlled manner whenever there are too many frames in the queue.
kDrain = 1,
kMaxValue = kDrain
};
Mode mode_;
// The number of consecutive Render() calls with a post-decode queue back-up
// (defined as more than one frame in the queue).
int consecutive_frames_with_back_up_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_LOW_LATENCY_VIDEO_RENDERER_ALGORITHM_H_
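As an illustration of the interface above, a minimal driving loop might look like the following sketch (modeled on the unit tests below; names such as |decoded_frame|, |vsync_begin| and |vsync_deadline| are illustrative, and a null MediaLog is accepted):

// Sketch: driving the algorithm once per display refresh.
blink::LowLatencyVideoRendererAlgorithm algorithm(/*media_log=*/nullptr);

// When a decoded frame arrives:
algorithm.EnqueueFrame(std::move(decoded_frame));

// Once per display refresh interval:
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> frame_to_draw =
    algorithm.Render(vsync_begin, vsync_deadline, &frames_dropped);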
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <queue>
#include "media/base/video_frame_pool.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/modules/mediastream/low_latency_video_renderer_algorithm.h"
namespace blink {
class LowLatencyVideoRendererAlgorithmTest : public testing::Test {
public:
LowLatencyVideoRendererAlgorithmTest()
: algorithm_(nullptr),
current_render_time_(base::TimeTicks() + base::TimeDelta::FromDays(1)) {
}
~LowLatencyVideoRendererAlgorithmTest() override = default;
scoped_refptr<media::VideoFrame> CreateFrame(
size_t maximum_composition_delay_in_frames) {
const gfx::Size natural_size(8, 8);
scoped_refptr<media::VideoFrame> frame = frame_pool_.CreateFrame(
media::PIXEL_FORMAT_I420, natural_size, gfx::Rect(natural_size),
natural_size, base::TimeDelta());
frame->metadata()->maximum_composition_delay_in_frames =
maximum_composition_delay_in_frames;
return frame;
}
size_t frames_queued() const { return algorithm_.frames_queued(); }
scoped_refptr<media::VideoFrame> RenderAndStep(size_t* frames_dropped) {
constexpr base::TimeDelta kRenderInterval =
base::TimeDelta::FromMillisecondsD(1000.0 / 60.0); // 60fps.
const base::TimeTicks start = current_render_time_;
current_render_time_ += kRenderInterval;
const base::TimeTicks end = current_render_time_;
return algorithm_.Render(start, end, frames_dropped);
}
protected:
media::VideoFramePool frame_pool_;
LowLatencyVideoRendererAlgorithm algorithm_;
base::TimeTicks current_render_time_;
private:
DISALLOW_COPY_AND_ASSIGN(LowLatencyVideoRendererAlgorithmTest);
};
TEST_F(LowLatencyVideoRendererAlgorithmTest, Empty) {
size_t frames_dropped = 0;
EXPECT_EQ(0u, frames_queued());
EXPECT_FALSE(RenderAndStep(&frames_dropped));
EXPECT_EQ(0u, frames_dropped);
EXPECT_EQ(0u, frames_queued());
}
TEST_F(LowLatencyVideoRendererAlgorithmTest, NormalMode) {
// Every frame rendered.
constexpr int kNumberOfFrames = 100;
constexpr int kMaxCompositionDelayInFrames = 6;
for (int i = 0; i < kNumberOfFrames; ++i) {
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
int frame_id = frame->unique_id();
algorithm_.EnqueueFrame(std::move(frame));
size_t frames_dropped = 0u;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(rendered_frame->unique_id(), frame_id);
EXPECT_EQ(frames_dropped, 0u);
}
}
TEST_F(LowLatencyVideoRendererAlgorithmTest, EnterDrainMode) {
// Enter drain mode when more than 6 frames are in the queue.
constexpr int kMaxCompositionDelayInFrames = 6;
constexpr int kNumberOfFramesSubmitted = kMaxCompositionDelayInFrames + 1;
std::queue<int> enqueued_frame_ids;
for (int i = 0; i < kNumberOfFramesSubmitted; ++i) {
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
}
// Every other frame will be rendered until there's one frame in the queue.
int processed_frames_count = 0;
while (processed_frames_count < kNumberOfFramesSubmitted - 1) {
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(frames_dropped, 1u);
enqueued_frame_ids.pop();
EXPECT_EQ(rendered_frame->unique_id(), enqueued_frame_ids.front());
enqueued_frame_ids.pop();
processed_frames_count += 1 + frames_dropped;
}
// One more frame to render.
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(frames_dropped, 0u);
EXPECT_EQ(rendered_frame->unique_id(), enqueued_frame_ids.front());
enqueued_frame_ids.pop();
EXPECT_EQ(enqueued_frame_ids.size(), 0u);
}
TEST_F(LowLatencyVideoRendererAlgorithmTest, ExitDrainMode) {
// Enter drain mode when more than 6 frames are in the queue.
constexpr int kMaxCompositionDelayInFrames = 6;
int number_of_frames_submitted = kMaxCompositionDelayInFrames + 1;
std::queue<int> enqueued_frame_ids;
for (int i = 0; i < number_of_frames_submitted; ++i) {
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
}
// Every other frame will be rendered until there's one frame in the queue.
int processed_frames_count = 0;
while (processed_frames_count < number_of_frames_submitted - 1) {
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(frames_dropped, 1u);
enqueued_frame_ids.pop();
EXPECT_EQ(rendered_frame->unique_id(), enqueued_frame_ids.front());
enqueued_frame_ids.pop();
// Enqueue a new frame.
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
++number_of_frames_submitted;
processed_frames_count += 1 + frames_dropped;
}
// Continue in normal mode without dropping frames.
constexpr int kNumberOfFramesInNormalMode = 30;
for (int i = 0; i < kNumberOfFramesInNormalMode; ++i) {
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(frames_dropped, 0u);
EXPECT_EQ(rendered_frame->unique_id(), enqueued_frame_ids.front());
enqueued_frame_ids.pop();
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
}
}
TEST_F(LowLatencyVideoRendererAlgorithmTest, SteadyStateQueueReduction) {
// Create an initial queue of 8 frames.
constexpr int kMaxCompositionDelayInFrames = 10;
constexpr size_t kInitialQueueSize = 8;
std::queue<int> enqueued_frame_ids;
for (size_t i = 0; i < kInitialQueueSize; ++i) {
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
}
EXPECT_EQ(frames_queued(), kInitialQueueSize);
constexpr size_t kNumberOfFramesSubmitted = 100;
constexpr int kMinimumNumberOfFramesBetweenDrops = 8;
int processed_frames_since_last_frame_drop = 0;
for (size_t i = kInitialQueueSize; i < kNumberOfFramesSubmitted; ++i) {
// Every frame will be rendered with occasional frame drops to reduce the
// steady state queue.
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
if (frames_dropped > 0) {
ASSERT_EQ(frames_dropped, 1u);
EXPECT_GE(processed_frames_since_last_frame_drop,
kMinimumNumberOfFramesBetweenDrops);
enqueued_frame_ids.pop();
processed_frames_since_last_frame_drop = 0;
} else {
++processed_frames_since_last_frame_drop;
}
EXPECT_EQ(rendered_frame->unique_id(), enqueued_frame_ids.front());
enqueued_frame_ids.pop();
// Enqueue a new frame.
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
enqueued_frame_ids.push(frame->unique_id());
algorithm_.EnqueueFrame(std::move(frame));
}
// Steady state queue should now have been reduced to one frame + the current
// frame that is also counted.
EXPECT_EQ(frames_queued(), 2u);
}
TEST_F(LowLatencyVideoRendererAlgorithmTest,
DropAllFramesIfQueueExceedsMaxSize) {
// Create an initial queue of 60 frames.
constexpr int kMaxCompositionDelayInFrames = 10;
constexpr size_t kInitialQueueSize = 60;
int last_id = 0;
for (size_t i = 0; i < kInitialQueueSize; ++i) {
scoped_refptr<media::VideoFrame> frame =
CreateFrame(kMaxCompositionDelayInFrames);
last_id = frame->unique_id();
algorithm_.EnqueueFrame(std::move(frame));
}
EXPECT_EQ(frames_queued(), kInitialQueueSize);
// Last submitted frame should be rendered.
size_t frames_dropped = 0;
scoped_refptr<media::VideoFrame> rendered_frame =
RenderAndStep(&frames_dropped);
ASSERT_TRUE(rendered_frame);
EXPECT_EQ(frames_dropped, kInitialQueueSize - 1);
EXPECT_EQ(rendered_frame->unique_id(), last_id);
}
} // namespace blink
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/modules/mediastream/video_renderer_algorithm_wrapper.h"
namespace blink {
VideoRendererAlgorithmWrapper::VideoRendererAlgorithmWrapper(
const media::TimeSource::WallClockTimeCB& wall_clock_time_cb,
media::MediaLog* media_log)
: wall_clock_time_cb_(wall_clock_time_cb),
media_log_(media_log),
renderer_algorithm_(RendererAlgorithm::Default) {
default_rendering_frame_buffer_ =
std::make_unique<media::VideoRendererAlgorithm>(wall_clock_time_cb_,
media_log_);
}
scoped_refptr<media::VideoFrame> VideoRendererAlgorithmWrapper::Render(
base::TimeTicks deadline_min,
base::TimeTicks deadline_max,
size_t* frames_dropped) {
return renderer_algorithm_ == RendererAlgorithm::Default
? default_rendering_frame_buffer_->Render(
deadline_min, deadline_max, frames_dropped)
: low_latency_rendering_frame_buffer_->Render(
deadline_min, deadline_max, frames_dropped);
}
void VideoRendererAlgorithmWrapper::EnqueueFrame(
scoped_refptr<media::VideoFrame> frame) {
DCHECK(frame);
if (renderer_algorithm_ == RendererAlgorithm::Default &&
frame->metadata()->maximum_composition_delay_in_frames) {
// Destroy the default algorithm; the low-latency algorithm takes over.
default_rendering_frame_buffer_.reset();
low_latency_rendering_frame_buffer_ =
std::make_unique<LowLatencyVideoRendererAlgorithm>(media_log_);
renderer_algorithm_ = RendererAlgorithm::LowLatency;
}
return renderer_algorithm_ == RendererAlgorithm::Default
? default_rendering_frame_buffer_->EnqueueFrame(frame)
: low_latency_rendering_frame_buffer_->EnqueueFrame(frame);
}
void VideoRendererAlgorithmWrapper::Reset(
media::VideoRendererAlgorithm::ResetFlag reset_flag) {
return renderer_algorithm_ == RendererAlgorithm::Default
? default_rendering_frame_buffer_->Reset(reset_flag)
: low_latency_rendering_frame_buffer_->Reset();
}
bool VideoRendererAlgorithmWrapper::NeedsReferenceTime() const {
return renderer_algorithm_ == RendererAlgorithm::Default;
}
} // namespace blink
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_VIDEO_RENDERER_ALGORITHM_WRAPPER_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_VIDEO_RENDERER_ALGORITHM_WRAPPER_H_
#include <stddef.h>
#include <stdint.h>
#include "base/callback.h"
#include "base/macros.h"
#include "base/time/time.h"
#include "media/base/media_util.h"
#include "media/base/time_source.h"
#include "media/base/video_frame.h"
#include "media/filters/video_renderer_algorithm.h"
#include "third_party/blink/renderer/modules/mediastream/low_latency_video_renderer_algorithm.h"
namespace blink {
class VideoRendererAlgorithmWrapper {
public:
VideoRendererAlgorithmWrapper(
const media::TimeSource::WallClockTimeCB& wall_clock_time_cb,
media::MediaLog* media_log);
scoped_refptr<media::VideoFrame> Render(base::TimeTicks deadline_min,
base::TimeTicks deadline_max,
size_t* frames_dropped);
void EnqueueFrame(scoped_refptr<media::VideoFrame> frame);
void Reset(media::VideoRendererAlgorithm::ResetFlag reset_flag =
media::VideoRendererAlgorithm::ResetFlag::kEverything);
size_t frames_queued() const {
return renderer_algorithm_ == RendererAlgorithm::Default
? default_rendering_frame_buffer_->frames_queued()
: low_latency_rendering_frame_buffer_->frames_queued();
}
base::TimeDelta average_frame_duration() const {
return renderer_algorithm_ == RendererAlgorithm::Default
? default_rendering_frame_buffer_->average_frame_duration()
: low_latency_rendering_frame_buffer_->average_frame_duration();
}
bool NeedsReferenceTime() const;
private:
enum RendererAlgorithm { Default, LowLatency };
const media::TimeSource::WallClockTimeCB wall_clock_time_cb_;
media::MediaLog* media_log_;
RendererAlgorithm renderer_algorithm_;
std::unique_ptr<media::VideoRendererAlgorithm>
default_rendering_frame_buffer_;
std::unique_ptr<LowLatencyVideoRendererAlgorithm>
low_latency_rendering_frame_buffer_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_MODULES_MEDIASTREAM_VIDEO_RENDERER_ALGORITHM_WRAPPER_H_
......@@ -17,7 +17,6 @@
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/filters/video_renderer_algorithm.h"
#include "media/renderers/paint_canvas_video_renderer.h"
#include "services/viz/public/cpp/gpu/context_provider_command_buffer.h"
#include "skia/ext/platform_canvas.h"
......@@ -187,11 +186,11 @@ WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor(
if (remote_video && Platform::Current()->RTCSmoothnessAlgorithmEnabled()) {
base::AutoLock auto_lock(current_frame_lock_);
rendering_frame_buffer_.reset(new media::VideoRendererAlgorithm(
rendering_frame_buffer_ = std::make_unique<VideoRendererAlgorithmWrapper>(
ConvertToBaseRepeatingCallback(CrossThreadBindRepeating(
&WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks,
CrossThreadUnretained(this))),
&media_log_));
&media_log_);
}
// Just for logging purpose.
......@@ -348,10 +347,16 @@ void WebMediaPlayerMSCompositor::EnqueueFrame(
return;
}
// If we detect a bad frame without |render_time|, we switch off algorithm,
// because without |render_time|, algorithm cannot work.
// In general, this should not happen.
if (!frame->metadata()->reference_time.has_value()) {
// If we detect a bad frame without |reference_time|, we switch off the
// algorithm, because without |reference_time| the algorithm cannot work.
// |reference_time| is not set for low-latency video streams, which are
// therefore rendered without an algorithm, unless
// |maximum_composition_delay_in_frames| is set, in which case a dedicated
// low-latency algorithm is switched on. Please
// note that this is an experimental feature that is only active if certain
// experimental parameters are specified in WebRTC. See crbug.com/1138888 for
// more information.
if (!frame->metadata()->reference_time.has_value() &&
!frame->metadata()->maximum_composition_delay_in_frames) {
DLOG(WARNING)
<< "Incoming VideoFrames have no reference_time, switching off super "
"sophisticated rendering algorithm";
......@@ -359,7 +364,9 @@ void WebMediaPlayerMSCompositor::EnqueueFrame(
RenderWithoutAlgorithm(std::move(frame), is_copy);
return;
}
base::TimeTicks render_time = *frame->metadata()->reference_time;
base::TimeTicks render_time = frame->metadata()->reference_time
? *frame->metadata()->reference_time
: base::TimeTicks();
// The code below handles the case where UpdateCurrentFrame() callbacks stop.
// These callbacks can stop when the tab is hidden or the page area containing
......@@ -409,11 +416,12 @@ bool WebMediaPlayerMSCompositor::UpdateCurrentFrame(
base::TimeTicks render_time =
current_frame_->metadata()->reference_time.value_or(
base::TimeTicks());
if (!current_frame_->metadata()->reference_time.has_value()) {
DCHECK(!rendering_frame_buffer_)
<< "VideoFrames need REFERENCE_TIME to use "
"sophisticated video rendering algorithm.";
}
DCHECK(current_frame_->metadata()->reference_time.has_value() ||
!rendering_frame_buffer_ ||
(rendering_frame_buffer_ &&
!rendering_frame_buffer_->NeedsReferenceTime()))
<< "VideoFrames need REFERENCE_TIME to use "
"sophisticated video rendering algorithm.";
TRACE_EVENT_END2("media", "UpdateCurrentFrame", "Ideal Render Instant",
render_time.ToInternalValue(), "Serial", serial_);
}
......@@ -759,11 +767,11 @@ void WebMediaPlayerMSCompositor::SetAlgorithmEnabledForTesting(
}
if (!rendering_frame_buffer_) {
rendering_frame_buffer_.reset(new media::VideoRendererAlgorithm(
rendering_frame_buffer_ = std::make_unique<VideoRendererAlgorithmWrapper>(
WTF::BindRepeating(
&WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks,
WTF::Unretained(this)),
&media_log_));
&media_log_);
}
}
......
......@@ -20,6 +20,7 @@
#include "media/base/media_util.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_video_frame_submitter.h"
#include "third_party/blink/renderer/modules/mediastream/video_renderer_algorithm_wrapper.h"
#include "third_party/blink/renderer/modules/modules_export.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
#include "third_party/blink/renderer/platform/wtf/thread_safe_ref_counted.h"
......@@ -233,7 +234,7 @@ class MODULES_EXPORT WebMediaPlayerMSCompositor
// |rendering_frame_buffer_| stores the incoming frames, and provides a frame
// selection method which returns the best frame for the render interval.
std::unique_ptr<media::VideoRendererAlgorithm> rendering_frame_buffer_;
std::unique_ptr<VideoRendererAlgorithmWrapper> rendering_frame_buffer_;
// |current_frame_rendered_| is updated on compositor thread only.
// It's used to track whether |current_frame_| was painted for detecting
......
......@@ -299,6 +299,11 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
if (!render_immediately)
video_frame->metadata()->reference_time = render_time;
if (incoming_frame.max_composition_delay_in_frames()) {
video_frame->metadata()->maximum_composition_delay_in_frames =
*incoming_frame.max_composition_delay_in_frames();
}
video_frame->metadata()->decode_end_time = current_time;
// RTP_TIMESTAMP, PROCESSING_TIME, and CAPTURE_BEGIN_TIME are all exposed
......