Commit 63f2ddbb authored by Markus Handell's avatar Markus Handell Committed by Commit Bot

MediaStreamRemoteVideoSource: provide frame timestamps.

MSRVS only provided base::TimeTicks() (0) as frame timestamps for already
decoded frames. This interacted badly with MediaRecorder which received
non-monotonic timestamps.

Bug: 1048140
Change-Id: I9f351f6631154341fd07f5e3dbd429a4eff73a23
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2033766
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Commit-Queue: Markus Handell <handellm@google.com>
Cr-Commit-Position: refs/heads/master@{#738129}
parent 4f2ef285
......@@ -118,7 +118,8 @@ class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
// VideoSinkInterface<webrtc::RecordableEncodedFrame>
void OnFrame(const webrtc::RecordableEncodedFrame& frame) override;
void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame);
void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks estimated_capture_time);
private:
void OnEncodedVideoFrameOnIO(scoped_refptr<EncodedVideoFrame> frame,
......@@ -315,18 +316,23 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
media::VideoFrameMetadata::CAPTURE_BEGIN_TIME, capture_time);
}
// Use our computed render time as estimated capture time. If timestamp_us()
// (which is actually the suggested render time) is set by WebRTC, it's based
// on the RTP timestamps in the frame's packets, so congruent with the
// received frame capture timestamps. If set by us, it's as congruent as we
// can get with the timestamp sequence of frames we received.
PostCrossThreadTask(
*io_task_runner_, FROM_HERE,
CrossThreadBindOnce(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread,
WrapRefCounted(this), video_frame));
WrapRefCounted(this), video_frame, render_time));
}
void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::
DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame) {
DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks estimated_capture_time) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::DoRenderFrameOnIOThread");
// TODO(hclam): Give the estimated capture time.
frame_callback_.Run(std::move(video_frame), base::TimeTicks());
frame_callback_.Run(std::move(video_frame), estimated_capture_time);
}
void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
......@@ -342,6 +348,11 @@ void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame(
? base::TimeTicks() + incoming_timestamp
: base::TimeTicks() + incoming_timestamp + time_diff_encoded_;
// Use our computed render time as estimated capture time. If render_time()
// is set by WebRTC, it's based on the RTP timestamps in the frame's packets,
// so congruent with the received frame capture timestamps. If set by us, it's
// as congruent as we can get with the timestamp sequence of frames we
// received.
PostCrossThreadTask(
*io_task_runner_, FROM_HERE,
CrossThreadBindOnce(&RemoteVideoSourceDelegate::OnEncodedVideoFrameOnIO,
......
......@@ -39,12 +39,24 @@ namespace {
// for both here and in MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
// we need to use the worst case difference between these two measurements.
float kChromiumWebRtcMaxTimeDiffMs = 40.0f;
using ::testing::_;
using ::testing::Gt;
using ::testing::SaveArg;
using ::testing::Sequence;
} // namespace
// GMock action that runs the captured |closure| when the expected mock call
// fires; used to quit a base::RunLoop from inside an EXPECT_CALL.
ACTION_P(RunClosure, closure) {
closure.Run();
}
// Returns a webrtc::VideoFrame::Builder pre-populated with an 8x8 I420
// buffer whose pixels are all black. Callers chain further setters (e.g.
// set_timestamp_ms) before calling build().
webrtc::VideoFrame::Builder CreateBlackFrameBuilder() {
  rtc::scoped_refptr<webrtc::I420Buffer> black_buffer =
      webrtc::I420Buffer::Create(8, 8);
  webrtc::I420Buffer::SetBlack(black_buffer);
  webrtc::VideoFrame::Builder builder;
  return builder.set_video_frame_buffer(black_buffer);
}
class MediaStreamRemoteVideoSourceUnderTest
: public blink::MediaStreamRemoteVideoSource {
public:
......@@ -422,6 +434,9 @@ TEST_F(MediaStreamRemoteVideoSourceTest, NoTimestampUsMeansNoReferenceTime) {
class TestEncodedVideoFrame : public webrtc::RecordableEncodedFrame {
public:
explicit TestEncodedVideoFrame(webrtc::Timestamp timestamp)
: timestamp_(timestamp) {}
rtc::scoped_refptr<const webrtc::EncodedImageBufferInterface> encoded_buffer()
const override {
return nullptr;
......@@ -436,9 +451,10 @@ class TestEncodedVideoFrame : public webrtc::RecordableEncodedFrame {
EncodedResolution resolution() const override {
return EncodedResolution{0, 0};
}
webrtc::Timestamp render_time() const override {
return webrtc::Timestamp::ms(0);
}
webrtc::Timestamp render_time() const override { return timestamp_; }
private:
webrtc::Timestamp timestamp_;
};
TEST_F(MediaStreamRemoteVideoSourceTest, ForwardsEncodedVideoFrames) {
......@@ -449,7 +465,112 @@ TEST_F(MediaStreamRemoteVideoSourceTest, ForwardsEncodedVideoFrames) {
base::RepeatingClosure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(sink, OnEncodedVideoFrame)
.WillOnce(RunClosure(std::move(quit_closure)));
source()->EncodedSinkInterfaceForTesting()->OnFrame(TestEncodedVideoFrame());
source()->EncodedSinkInterfaceForTesting()->OnFrame(
TestEncodedVideoFrame(webrtc::Timestamp::ms(0)));
run_loop.Run();
track->RemoveEncodedSink(&sink);
}
TEST_F(MediaStreamRemoteVideoSourceTest,
       ForwardsFramesWithIncreasingTimestampsWithNullSourceTimestamp) {
  // Even when incoming frames carry no source timestamp (timestamp_ms == 0),
  // successive frames delivered to sinks must get strictly increasing
  // estimated capture times.
  std::unique_ptr<blink::MediaStreamVideoTrack> track(CreateTrack());
  blink::MockMediaStreamVideoSink sink;
  track->AddSink(&sink, sink.GetDeliverFrameCB(), /*is_link_secure=*/false);

  base::RunLoop run_loop;
  base::TimeTicks first_capture_time;
  Sequence delivery_order;
  EXPECT_CALL(sink, OnVideoFrame)
      .InSequence(delivery_order)
      .WillOnce(SaveArg<0>(&first_capture_time));
  // The second frame's capture time has to exceed the first frame's.
  EXPECT_CALL(sink, OnVideoFrame(Gt(first_capture_time)))
      .InSequence(delivery_order)
      .WillOnce(RunClosure(run_loop.QuitClosure()));

  source()->SinkInterfaceForTesting()->OnFrame(
      CreateBlackFrameBuilder().set_timestamp_ms(0).build());
  // Busy-wait until the clock advances so the two frames cannot observe the
  // same base::TimeTicks::Now() reading.
  const base::TimeTicks spin_start = base::TimeTicks::Now();
  while (base::TimeTicks::Now() == spin_start) {
  }
  source()->SinkInterfaceForTesting()->OnFrame(
      CreateBlackFrameBuilder().set_timestamp_ms(0).build());
  run_loop.Run();

  track->RemoveSink(&sink);
}
TEST_F(MediaStreamRemoteVideoSourceTest,
       ForwardsFramesWithIncreasingTimestampsWithSourceTimestamp) {
  // Frames that do carry source timestamps (4711ms then 4712ms) must be
  // forwarded with strictly increasing estimated capture times.
  std::unique_ptr<blink::MediaStreamVideoTrack> track(CreateTrack());
  blink::MockMediaStreamVideoSink sink;
  track->AddSink(&sink, sink.GetDeliverFrameCB(), /*is_link_secure=*/false);

  base::RunLoop run_loop;
  base::TimeTicks first_capture_time;
  Sequence delivery_order;
  EXPECT_CALL(sink, OnVideoFrame)
      .InSequence(delivery_order)
      .WillOnce(SaveArg<0>(&first_capture_time));
  // The second frame's capture time has to exceed the first frame's.
  EXPECT_CALL(sink, OnVideoFrame(Gt(first_capture_time)))
      .InSequence(delivery_order)
      .WillOnce(RunClosure(run_loop.QuitClosure()));

  source()->SinkInterfaceForTesting()->OnFrame(
      CreateBlackFrameBuilder().set_timestamp_ms(4711).build());
  source()->SinkInterfaceForTesting()->OnFrame(
      CreateBlackFrameBuilder().set_timestamp_ms(4712).build());
  run_loop.Run();

  track->RemoveSink(&sink);
}
TEST_F(MediaStreamRemoteVideoSourceTest,
       ForwardsEncodedFramesWithIncreasingTimestampsWithNullSourceTimestamp) {
  // Encoded-frame variant: with a zero render_time on both frames, the
  // delivered estimated capture times must still strictly increase.
  std::unique_ptr<blink::MediaStreamVideoTrack> track(CreateTrack());
  blink::MockMediaStreamVideoSink sink;
  track->AddEncodedSink(&sink, sink.GetDeliverEncodedVideoFrameCB());

  base::RunLoop run_loop;
  base::TimeTicks first_capture_time;
  Sequence delivery_order;
  EXPECT_CALL(sink, OnEncodedVideoFrame)
      .InSequence(delivery_order)
      .WillOnce(SaveArg<0>(&first_capture_time));
  // The second frame's capture time has to exceed the first frame's.
  EXPECT_CALL(sink, OnEncodedVideoFrame(Gt(first_capture_time)))
      .InSequence(delivery_order)
      .WillOnce(RunClosure(run_loop.QuitClosure()));

  source()->EncodedSinkInterfaceForTesting()->OnFrame(
      TestEncodedVideoFrame(webrtc::Timestamp::ms(0)));
  // Busy-wait until the clock advances so the two frames cannot observe the
  // same base::TimeTicks::Now() reading.
  const base::TimeTicks spin_start = base::TimeTicks::Now();
  while (base::TimeTicks::Now() == spin_start) {
  }
  source()->EncodedSinkInterfaceForTesting()->OnFrame(
      TestEncodedVideoFrame(webrtc::Timestamp::ms(0)));
  run_loop.Run();

  track->RemoveEncodedSink(&sink);
}
TEST_F(MediaStreamRemoteVideoSourceTest,
       ForwardsEncodedFramesWithIncreasingTimestampsWithSourceTimestamp) {
  // Encoded-frame variant with explicit render times (42ms then 43ms): the
  // delivered estimated capture times must strictly increase.
  std::unique_ptr<blink::MediaStreamVideoTrack> track(CreateTrack());
  blink::MockMediaStreamVideoSink sink;
  track->AddEncodedSink(&sink, sink.GetDeliverEncodedVideoFrameCB());

  base::RunLoop run_loop;
  base::TimeTicks first_capture_time;
  Sequence delivery_order;
  EXPECT_CALL(sink, OnEncodedVideoFrame)
      .InSequence(delivery_order)
      .WillOnce(SaveArg<0>(&first_capture_time));
  // The second frame's capture time has to exceed the first frame's.
  EXPECT_CALL(sink, OnEncodedVideoFrame(Gt(first_capture_time)))
      .InSequence(delivery_order)
      .WillOnce(RunClosure(run_loop.QuitClosure()));

  source()->EncodedSinkInterfaceForTesting()->OnFrame(
      TestEncodedVideoFrame(webrtc::Timestamp::ms(42)));
  source()->EncodedSinkInterfaceForTesting()->OnFrame(
      TestEncodedVideoFrame(webrtc::Timestamp::ms(43)));
  run_loop.Run();

  track->RemoveEncodedSink(&sink);
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment