Commit ae2dfa8a authored by perkj@chromium.org

Change a disabled MediaStreamVideoTrack to output black video frames for each incoming frame.

The frame provided to the track is unchanged. Instead, a black frame with the same timestamp is created and forwarded to the sinks.
Please see the bug and the discussion in https://groups.google.com/forum/#!topic/discuss-webrtc/nZWA0prohio for the rationale.
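
For readers skimming the diff below, a minimal self-contained sketch of the substitution pattern (hypothetical Frame and FrameDelivererSketch types, not Chromium's actual media::VideoFrame or FrameDeliverer interfaces): the black frame is cached, recreated only when the source size changes, and restamped with each incoming frame's timestamp, so sinks keep receiving frames at the source frame rate while the track is disabled.

// Sketch only: hypothetical types, not Chromium's media::VideoFrame API.
#include <cstdint>
#include <memory>
#include <vector>

struct Frame {
  int width;
  int height;
  int64_t timestamp_us;
  std::vector<uint8_t> y_plane;  // All 0x00 == black luma.
};

class FrameDelivererSketch {
 public:
  void SetEnabled(bool enabled) {
    enabled_ = enabled;
    if (enabled_)
      black_frame_.reset();  // Drop the cached black frame when re-enabled.
  }

  // Picks the frame to forward to the sinks for an incoming |frame|.
  const Frame& FrameForSinks(const Frame& frame) {
    if (enabled_)
      return frame;
    // Recreate the black frame only when the source size changes.
    if (!black_frame_ || black_frame_->width != frame.width ||
        black_frame_->height != frame.height) {
      const size_t y_size = static_cast<size_t>(frame.width) * frame.height;
      black_frame_.reset(new Frame{frame.width, frame.height, 0,
                                   std::vector<uint8_t>(y_size, 0x00)});
    }
    // Reuse the incoming timestamp so the cadence seen by sinks is unchanged.
    black_frame_->timestamp_us = frame.timestamp_us;
    return *black_frame_;
  }

 private:
  bool enabled_ = true;
  std::unique_ptr<Frame> black_frame_;
};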

BUG=398844
TEST=Open https://apprtc.appspot.com in two tabs to create a call. Hit Ctrl-Shift-E in the first tab to disable the local video track and notice that the local video turns black. Switch to the second tab and notice that the remote video is black as well.

Review URL: https://codereview.chromium.org/432903002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@287787 0039d316-1c4b-4281-b951-d872f2087c98
parent 56d55253

@@ -19,7 +19,9 @@ void ResetCallback(scoped_ptr<VideoCaptureDeliverFrameCB> callback) {
 // MediaStreamVideoTrack::FrameDeliverer is a helper class used for registering
 // VideoCaptureDeliverFrameCB on the main render thread to receive video frames
 // on the IO-thread.
-// Frames are only delivered to the sinks if the track is enabled.
+// Frames are only delivered to the sinks if the track is enabled. If the track
+// is disabled, a black frame is instead forwarded to the sinks at the same
+// frame rate.
 class MediaStreamVideoTrack::FrameDeliverer
     : public base::RefCountedThreadSafe<FrameDeliverer> {
  public:
@@ -53,6 +55,10 @@ class MediaStreamVideoTrack::FrameDeliverer
       void* id, const scoped_refptr<base::MessageLoopProxy>& message_loop);
   void SetEnabledOnIO(bool enabled);
 
+  // Returns |black_frame_| where the size and time stamp is set to the same
+  // as in |reference_frame|.
+  const scoped_refptr<media::VideoFrame>& GetBlackFrame(
+      const scoped_refptr<media::VideoFrame>& reference_frame);
+
   // Used to DCHECK that AddCallback and RemoveCallback are called on the main
   // render thread.
@@ -60,6 +66,7 @@ class MediaStreamVideoTrack::FrameDeliverer
   scoped_refptr<base::MessageLoopProxy> io_message_loop_;
   bool enabled_;
+  scoped_refptr<media::VideoFrame> black_frame_;
 
   typedef std::pair<void*, VideoCaptureDeliverFrameCB> VideoIdCallbackPair;
   std::vector<VideoIdCallbackPair> callbacks_;
@@ -131,6 +138,8 @@ void MediaStreamVideoTrack::FrameDeliverer::SetEnabled(bool enabled) {
 void MediaStreamVideoTrack::FrameDeliverer::SetEnabledOnIO(bool enabled) {
   DCHECK(io_message_loop_->BelongsToCurrentThread());
   enabled_ = enabled;
+  if (enabled_)
+    black_frame_ = NULL;
 }
 
 void MediaStreamVideoTrack::FrameDeliverer::DeliverFrameOnIO(
@@ -138,14 +147,28 @@ void MediaStreamVideoTrack::FrameDeliverer::DeliverFrameOnIO(
     const media::VideoCaptureFormat& format,
     const base::TimeTicks& estimated_capture_time) {
   DCHECK(io_message_loop_->BelongsToCurrentThread());
-  if (!enabled_)
-    return;
+  const scoped_refptr<media::VideoFrame>& video_frame =
+      enabled_ ? frame : GetBlackFrame(frame);
   for (std::vector<VideoIdCallbackPair>::iterator it = callbacks_.begin();
        it != callbacks_.end(); ++it) {
-    it->second.Run(frame, format, estimated_capture_time);
+    it->second.Run(video_frame, format, estimated_capture_time);
   }
 }
 
+const scoped_refptr<media::VideoFrame>&
+MediaStreamVideoTrack::FrameDeliverer::GetBlackFrame(
+    const scoped_refptr<media::VideoFrame>& reference_frame) {
+  DCHECK(io_message_loop_->BelongsToCurrentThread());
+  if (!black_frame_ ||
+      black_frame_->natural_size() != reference_frame->natural_size())
+    black_frame_ =
+        media::VideoFrame::CreateBlackFrame(reference_frame->natural_size());
+  black_frame_->set_timestamp(reference_frame->timestamp());
+  return black_frame_;
+}
+
 // static
 blink::WebMediaStreamTrack MediaStreamVideoTrack::CreateVideoTrack(
     MediaStreamVideoSource* source,
...
@@ -18,6 +18,9 @@
 namespace content {
 
+const uint8 kBlackValue = 0x00;
+const uint8 kColorValue = 0xAB;
+
 ACTION_P(RunClosure, closure) {
   closure.Run();
 }
@@ -43,9 +46,10 @@ class MediaStreamVideoTrackTest : public ::testing::Test {
    EXPECT_CALL(*sink, OnVideoFrame()).WillOnce(
        RunClosure(quit_closure));
    scoped_refptr<media::VideoFrame> frame =
-       media::VideoFrame::CreateBlackFrame(
+       media::VideoFrame::CreateColorFrame(
            gfx::Size(MediaStreamVideoSource::kDefaultWidth,
-                     MediaStreamVideoSource::kDefaultHeight));
+                     MediaStreamVideoSource::kDefaultHeight),
+           kColorValue, kColorValue, kColorValue, base::TimeDelta());
    mock_source()->DeliverVideoFrame(frame);
    run_loop.Run();
  }
@@ -163,24 +167,20 @@ TEST_F(MediaStreamVideoTrackTest, SetEnabled) {
   DeliverVideoFrameAndWaitForRenderer(&sink);
   EXPECT_EQ(1, sink.number_of_frames());
+  EXPECT_EQ(kColorValue, *sink.last_frame()->data(media::VideoFrame::kYPlane));
 
   video_track->SetEnabled(false);
   EXPECT_FALSE(sink.enabled());
-  scoped_refptr<media::VideoFrame> frame =
-      media::VideoFrame::CreateBlackFrame(
-          gfx::Size(MediaStreamVideoSource::kDefaultWidth,
-                    MediaStreamVideoSource::kDefaultHeight));
-  mock_source()->DeliverVideoFrame(frame);
-  // Wait for the IO thread to complete delivering frames.
-  io_message_loop()->RunUntilIdle();
-  EXPECT_EQ(1, sink.number_of_frames());
+  DeliverVideoFrameAndWaitForRenderer(&sink);
+  EXPECT_EQ(2, sink.number_of_frames());
+  EXPECT_EQ(kBlackValue, *sink.last_frame()->data(media::VideoFrame::kYPlane));
 
   video_track->SetEnabled(true);
   EXPECT_TRUE(sink.enabled());
-  mock_source()->DeliverVideoFrame(frame);
   DeliverVideoFrameAndWaitForRenderer(&sink);
-  EXPECT_EQ(2, sink.number_of_frames());
+  EXPECT_EQ(3, sink.number_of_frames());
+  EXPECT_EQ(kColorValue, *sink.last_frame()->data(media::VideoFrame::kYPlane));
   MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
 }
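
The updated SetEnabled test above boils down to: frame delivery continues while the track is disabled, and the delivered luma switches from the source's 0xAB test color (kColorValue) to 0x00 (kBlackValue) and back. A minimal self-contained sketch of that check, with a hypothetical FakeSink standing in for MockMediaStreamVideoSink:

// Sketch only: hypothetical FakeSink, not the MockMediaStreamVideoSink above.
#include <cassert>
#include <cstdint>
#include <vector>

struct FakeSink {
  int number_of_frames = 0;
  uint8_t last_y_value = 0;
  void OnFrame(const std::vector<uint8_t>& y_plane) {
    ++number_of_frames;
    last_y_value = y_plane.empty() ? 0 : y_plane[0];
  }
};

int main() {
  const std::vector<uint8_t> color_y(16, 0xAB);  // Source test frame (enabled).
  const std::vector<uint8_t> black_y(16, 0x00);  // Substituted frame (disabled).

  FakeSink sink;
  sink.OnFrame(color_y);  // Track enabled: the source frame reaches the sink.
  assert(sink.number_of_frames == 1 && sink.last_y_value == 0xAB);
  sink.OnFrame(black_y);  // Track disabled: a frame still arrives, but black.
  assert(sink.number_of_frames == 2 && sink.last_y_value == 0x00);
  sink.OnFrame(color_y);  // Track re-enabled: the source frame is back.
  assert(sink.number_of_frames == 3 && sink.last_y_value == 0xAB);
  return 0;
}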
...
@@ -31,6 +31,7 @@ void MockMediaStreamVideoSink::DeliverVideoFrame(
     const scoped_refptr<media::VideoFrame>& frame,
     const media::VideoCaptureFormat& format,
     const base::TimeTicks& estimated_capture_time) {
+  last_frame_ = frame;
   ++number_of_frames_;
   format_ = frame->format();
   frame_size_ = frame->natural_size();
...
@@ -32,6 +32,7 @@ class MockMediaStreamVideoSink : public MediaStreamVideoSink {
   int number_of_frames() const { return number_of_frames_; }
   media::VideoFrame::Format format() const { return format_; }
   gfx::Size frame_size() const { return frame_size_; }
+  scoped_refptr<media::VideoFrame> last_frame() const { return last_frame_; };
   bool enabled() const { return enabled_; }
   blink::WebMediaStreamSource::ReadyState state() const { return state_; }
@@ -47,6 +48,7 @@ class MockMediaStreamVideoSink : public MediaStreamVideoSink {
   media::VideoFrame::Format format_;
   blink::WebMediaStreamSource::ReadyState state_;
   gfx::Size frame_size_;
+  scoped_refptr<media::VideoFrame> last_frame_;
   base::WeakPtrFactory<MockMediaStreamVideoSink> weak_factory_;
 };
...
@@ -81,12 +81,6 @@ void RTCVideoRenderer::OnReadyStateChanged(
   RenderSignalingFrame();
 }
 
-void RTCVideoRenderer::OnEnabledChanged(bool enabled) {
-  DCHECK(message_loop_proxy_->BelongsToCurrentThread());
-  if (!enabled)
-    RenderSignalingFrame();
-}
-
 void RTCVideoRenderer::OnVideoFrame(
     const scoped_refptr<media::VideoFrame>& frame,
     const media::VideoCaptureFormat& format,
@@ -108,7 +102,7 @@ void RTCVideoRenderer::OnVideoFrame(
 void RTCVideoRenderer::RenderSignalingFrame() {
   // This is necessary to make sure audio can play if the video tag src is
-  // a MediaStream video track that has been rejected, ended or disabled.
+  // a MediaStream video track that has been rejected or ended.
   // It also ensure that the renderer don't hold a reference to a real video
   // frame if no more frames are provided. This is since there might be a
   // finite number of available buffers. E.g, video that
...
@@ -61,7 +61,6 @@ class CONTENT_EXPORT RTCVideoRenderer
   // VideoTrackSink implementation. Called on the main thread.
   virtual void OnReadyStateChanged(
       blink::WebMediaStreamSource::ReadyState state) OVERRIDE;
-  virtual void OnEnabledChanged(bool enabled) OVERRIDE;
 
   void RenderSignalingFrame();
...