Commit 530c4af7 authored by perkj, committed by Commit bot

Refactor MediaStreamTrack video onmute event.

This changes the monitoring of video frames delivered to a MediaStreamVideoSource so that it is started when the source is started and stopped when the source is stopped, rather than depending on destruction of the source object, since destruction depends on when Blink runs garbage collection.

It removes the now-unused MediaStreamVideoTrack::SetMutedState and GetMutedState.

It reduces the number of thread hops between the IO thread and the render thread by triggering the monitoring callback only when the muted state has changed, rather than once per monitoring interval.

It reduces the test time of media_stream_video_source_unittest.cc from more than 7s to around 200ms.

BUG= 404106

Review URL: https://codereview.chromium.org/509873002

Cr-Commit-Position: refs/heads/master@{#292853}
parent 7b4bf4d1
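For orientation before the diff, here is a minimal, self-contained sketch of the frame-monitoring pattern this change introduces, written in plain C++ without the Chromium base/ threading plumbing. The class name FrameMonitorSketch and the manually driven CheckFramesReceived() are illustrative only, not names from the patch: monitoring starts and stops with the source, and the muted callback fires only when the state flips.

// Sketch of the monitoring pattern (illustrative names, no Chromium base/ types).
#include <cstdint>
#include <functional>
#include <iostream>
#include <utility>

class FrameMonitorSketch {
 public:
  using OnMutedCallback = std::function<void(bool muted)>;

  // Started when the source starts (cf. MediaStreamVideoSource::OnStartDone).
  void StartFrameMonitoring(OnMutedCallback on_muted_callback) {
    monitoring_ = true;
    muted_state_ = false;
    on_muted_callback_ = std::move(on_muted_callback);
    last_counter_snapshot_ = frame_counter_;
  }

  // Stopped when the source stops (cf. MediaStreamVideoSource::DoStopSource).
  void StopFrameMonitoring() { monitoring_ = false; }

  // Called for every frame the source delivers.
  void OnFrameDelivered() { ++frame_counter_; }

  // Stands in for the delayed task that re-arms itself at the source frame
  // interval; call it once per monitoring interval.
  void CheckFramesReceived() {
    if (!monitoring_)
      return;  // Monitoring stopped together with the source.
    const bool muted = (last_counter_snapshot_ == frame_counter_);
    last_counter_snapshot_ = frame_counter_;
    // Key point of the patch: notify only when the state changes, so there is
    // no render-thread hop for every monitoring interval in the steady state.
    if (muted != muted_state_) {
      muted_state_ = muted;
      on_muted_callback_(muted);
    }
  }

 private:
  bool monitoring_ = false;
  bool muted_state_ = false;
  uint64_t frame_counter_ = 0;
  uint64_t last_counter_snapshot_ = 0;
  OnMutedCallback on_muted_callback_;
};

int main() {
  FrameMonitorSketch monitor;
  monitor.StartFrameMonitoring(
      [](bool muted) { std::cout << "muted=" << muted << "\n"; });
  monitor.CheckFramesReceived();  // No frames yet -> prints muted=1.
  monitor.OnFrameDelivered();
  monitor.CheckFramesReceived();  // A frame arrived -> prints muted=0.
  monitor.CheckFramesReceived();  // No new frames -> prints muted=1 again.
  monitor.StopFrameMonitoring();
  monitor.CheckFramesReceived();  // Stopped with the source -> no output.
  return 0;
}

In the actual patch, the periodic check is a PostDelayedTask on the IO thread re-armed at the source frame interval, and the callback is bounced to the render thread with media::BindToCurrentLoop, as shown in the VideoTrackAdapter changes below.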
@@ -20,7 +20,7 @@ MediaStreamTrack* MediaStreamTrack::GetTrack(
 MediaStreamTrack::MediaStreamTrack(
     const scoped_refptr<webrtc::MediaStreamTrackInterface>& track,
     bool is_local_track)
-    : track_(track), muted_state_(false), is_local_track_(is_local_track) {
+    : track_(track), is_local_track_(is_local_track) {
 }
 MediaStreamTrack::~MediaStreamTrack() {
@@ -32,16 +32,6 @@ void MediaStreamTrack::SetEnabled(bool enabled) {
   track_->set_enabled(enabled);
 }
-void MediaStreamTrack::SetMutedState(bool muted_state) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  muted_state_ = muted_state;
-}
-bool MediaStreamTrack::GetMutedState(void) const {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  return muted_state_;
-}
 void MediaStreamTrack::Stop() {
   DCHECK(thread_checker_.CalledOnValidThread());
   // Stop means that a track should be stopped permanently. But
...
@@ -2,8 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_EXTRA_DATA_H_
-#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_EXTRA_DATA_H_
+#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_H_
+#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_H_
 #include "base/callback.h"
 #include "base/compiler_specific.h"
@@ -36,9 +36,6 @@ class CONTENT_EXPORT MediaStreamTrack
   // If a subclass overrides this method it has to call the base class.
   virtual void SetEnabled(bool enabled);
-  virtual void SetMutedState(bool muted_state);
-  virtual bool GetMutedState(void) const;
   // TODO(xians): Make this pure virtual when Stop[Track] has been
   // implemented for remote audio tracks.
   virtual void Stop();
@@ -50,9 +47,6 @@ class CONTENT_EXPORT MediaStreamTrack
  protected:
   scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
-  // Set to true if the owner MediaStreamSource is not delivering frames.
-  bool muted_state_;
 private:
  const bool is_local_track_;
@@ -63,4 +57,4 @@ class CONTENT_EXPORT MediaStreamTrack
 } // namespace content
-#endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_EXTRA_DATA_H_
+#endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_TRACK_H_
@@ -15,7 +15,6 @@
 #include "content/renderer/media/media_stream_constraints_util.h"
 #include "content/renderer/media/media_stream_video_track.h"
 #include "content/renderer/media/video_track_adapter.h"
-#include "media/base/bind_to_current_loop.h"
 namespace content {
@@ -358,7 +357,6 @@ bool MediaStreamVideoSource::IsConstraintSupported(const std::string& name) {
 MediaStreamVideoSource::MediaStreamVideoSource()
     : state_(NEW),
-      muted_state_(false),
       track_adapter_(new VideoTrackAdapter(
           ChildProcess::current()->io_message_loop_proxy())),
       weak_factory_(this) {
@@ -460,6 +458,7 @@ void MediaStreamVideoSource::DoStopSource() {
   DVLOG(3) << "DoStopSource()";
   if (state_ == ENDED)
     return;
+  track_adapter_->StopFrameMonitoring();
   StopSourceImpl();
   state_ = ENDED;
   SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
@@ -535,6 +534,12 @@ void MediaStreamVideoSource::OnStartDone(MediaStreamRequestResult result) {
     DCHECK_EQ(STARTING, state_);
     state_ = STARTED;
     SetReadyState(blink::WebMediaStreamSource::ReadyStateLive);
+    track_adapter_->StartFrameMonitoring(
+        current_format_.frame_rate,
+        base::Bind(&MediaStreamVideoSource::SetMutedState,
+                   weak_factory_.GetWeakPtr()));
   } else {
     StopSource();
   }
@@ -577,15 +582,10 @@ void MediaStreamVideoSource::FinalizeAddTrack() {
     GetConstraintValueAsDouble(it->constraints,
                                kMaxFrameRate, &max_frame_rate);
-    VideoTrackAdapter::OnMutedCallback on_mute_callback =
-        media::BindToCurrentLoop(base::Bind(
-            &MediaStreamVideoSource::SetMutedState,
-            weak_factory_.GetWeakPtr()));
     track_adapter_->AddTrack(it->track, it->frame_callback,
                              max_width, max_height,
                              min_aspect_ratio, max_aspect_ratio,
-                             max_frame_rate, current_format_.frame_rate,
-                             on_mute_callback);
+                             max_frame_rate);
   }
   DVLOG(3) << "FinalizeAddTrack() result " << result;
@@ -611,18 +611,10 @@ void MediaStreamVideoSource::SetReadyState(
 void MediaStreamVideoSource::SetMutedState(bool muted_state) {
   DVLOG(3) << "MediaStreamVideoSource::SetMutedState state=" << muted_state;
   DCHECK(CalledOnValidThread());
-  if (muted_state != muted_state_) {
-    muted_state_ = muted_state;
-    if (!owner().isNull()) {
-      owner().setReadyState(muted_state_
-          ? blink::WebMediaStreamSource::ReadyStateMuted
-          : blink::WebMediaStreamSource::ReadyStateLive);
-    }
-  }
-  // WebMediaStreamSource doesn't have a muted state, the tracks do.
-  for (std::vector<MediaStreamVideoTrack*>::iterator it = tracks_.begin();
-       it != tracks_.end(); ++it) {
-    (*it)->SetMutedState(muted_state);
-  }
+  if (!owner().isNull()) {
+    owner().setReadyState(muted_state
+        ? blink::WebMediaStreamSource::ReadyStateMuted
+        : blink::WebMediaStreamSource::ReadyStateLive);
+  }
 }
...
@@ -149,7 +149,6 @@ class CONTENT_EXPORT MediaStreamVideoSource
   void FinalizeAddTrack();
   State state_;
-  bool muted_state_;
   media::VideoCaptureFormat current_format_;
...
@@ -198,6 +198,10 @@ class MediaStreamVideoSourceTest
     MediaStreamVideoSink::RemoveFromVideoTrack(&sink2, track2);
   }
+  void SetSourceSupportedFormats(const media::VideoCaptureFormats& formats) {
+    mock_source_->SetSupportedFormats(formats);
+  }
   void ReleaseTrackAndSourceOnAddTrackCallback(
       const blink::WebMediaStreamTrack& track_to_release) {
     track_to_release_ = track_to_release;
@@ -738,18 +742,25 @@ TEST_F(MediaStreamVideoSourceTest, Use0FpsSupportedFormat) {
   MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
 }
-// Test that a source producing no frames calls back the MSVCS to tell so, and
-// this in turn tells the Track attached. Then start passing frames, and check
+// Test that a source producing no frames change the source readyState to muted.
 // that in a reasonable time frame the muted state turns to false.
 TEST_F(MediaStreamVideoSourceTest, MutedSource) {
+  // Setup the source for support a frame rate of 2000fps in order to test
+  // the muted event faster. This is since the frame monitoring uses
+  // PostDelayedTask that is dependent on the source frame rate.
+  media::VideoCaptureFormats formats;
+  formats.push_back(media::VideoCaptureFormat(
+      gfx::Size(640, 480), 2000, media::PIXEL_FORMAT_I420));
+  SetSourceSupportedFormats(formats);
   MockMediaConstraintFactory factory;
   blink::WebMediaStreamTrack track =
       CreateTrackAndStartSource(factory.CreateWebMediaConstraints(),
-                                640, 480, 30);
+                                640, 480, 2000);
   MockMediaStreamVideoSink sink;
   MediaStreamVideoSink::AddToVideoTrack(&sink, sink.GetDeliverFrameCB(), track);
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track)->GetMutedState(), false);
+  EXPECT_EQ(track.source().readyState(),
+            blink::WebMediaStreamSource::ReadyStateLive);
   base::RunLoop run_loop;
   base::Closure quit_closure = run_loop.QuitClosure();
@@ -758,67 +769,22 @@ TEST_F(MediaStreamVideoSourceTest, MutedSource) {
       .WillOnce(DoAll(SaveArg<0>(&muted_state), RunClosure(quit_closure)));
   run_loop.Run();
   EXPECT_EQ(muted_state, true);
-  // TODO(mcasas): When added, check |track|'s (WebMediaStreamTrack) Muted
-  // attribute, should be true. In the meantime, check the MediaStreamTrack's.
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track)->GetMutedState(), true);
-  EXPECT_CALL(*mock_source(), DoSetMutedState(_))
-      .WillOnce(DoAll(SaveArg<0>(&muted_state), RunClosure(quit_closure)));
-  // Mock frame delivery happens asynchronously, not according to the configured
-  // frame rate, potentially many frames can pass before the muted state is
-  // flipped. |kMaxFrameCount| is used as a reasonable high bound of this value.
-  const int kMaxFrameCount = 10000;
-  int i = 0;
-  while (muted_state != false || ++i > kMaxFrameCount)
-    DeliverVideoFrameAndWaitForRenderer(640, 480, &sink);
-  EXPECT_EQ(muted_state, false);
-  EXPECT_LT(i, kMaxFrameCount);
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track)->GetMutedState(), false);
-  MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
-}
-// Test that a source producing no frames calls back the MSVCS to tell so, and
-// this in turn tells all the Tracks attached.
-TEST_F(MediaStreamVideoSourceTest, MutedSourceWithTwoTracks) {
-  MockMediaConstraintFactory factory1;
-  blink::WebMediaStreamTrack track1 =
-      CreateTrackAndStartSource(factory1.CreateWebMediaConstraints(),
-                                MediaStreamVideoSource::kDefaultWidth,
-                                MediaStreamVideoSource::kDefaultHeight,
-                                30);
-  MockMediaConstraintFactory factory2;
-  factory2.AddMandatory(MediaStreamVideoSource::kMaxFrameRate, 15);
-  blink::WebMediaStreamTrack track2 = CreateTrack(
-      "123", factory2.CreateWebMediaConstraints());
-  EXPECT_EQ(0, NumberOfFailedConstraintsCallbacks());
-  MockMediaStreamVideoSink sink1;
-  MediaStreamVideoSink::AddToVideoTrack(&sink1, sink1.GetDeliverFrameCB(),
-                                        track1);
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track1)->GetMutedState(), false);
-  MockMediaStreamVideoSink sink2;
-  MediaStreamVideoSink::AddToVideoTrack(&sink2, sink2.GetDeliverFrameCB(),
-                                        track2);
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track2)->GetMutedState(), false);
-  base::RunLoop run_loop;
-  base::Closure quit_closure = run_loop.QuitClosure();
-  bool muted_state = false;
-  EXPECT_CALL(*mock_source(), DoSetMutedState(_))
-      .WillOnce(DoAll(SaveArg<0>(&muted_state), RunClosure(quit_closure)));
-  run_loop.Run();
-  EXPECT_EQ(muted_state, true);
-  // TODO(mcasas): When added, check |track|'s (WebMediaStreamTrack) Muted
-  // attribute, should be true. In the meantime, check the MediaStreamTrack's.
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track1)->GetMutedState(), true);
-  EXPECT_EQ(MediaStreamTrack::GetTrack(track2)->GetMutedState(), true);
-  MediaStreamVideoSink::RemoveFromVideoTrack(&sink1, track1);
-  MediaStreamVideoSink::RemoveFromVideoTrack(&sink2, track2);
-}
+  EXPECT_EQ(track.source().readyState(),
+            blink::WebMediaStreamSource::ReadyStateMuted);
+  base::RunLoop run_loop2;
+  base::Closure quit_closure2 = run_loop2.QuitClosure();
+  EXPECT_CALL(*mock_source(), DoSetMutedState(_))
+      .WillOnce(DoAll(SaveArg<0>(&muted_state), RunClosure(quit_closure2)));
+  DeliverVideoFrameAndWaitForRenderer(640, 480, &sink);
+  run_loop2.Run();
+  EXPECT_EQ(muted_state, false);
+  EXPECT_EQ(track.source().readyState(),
+            blink::WebMediaStreamSource::ReadyStateLive);
+  MediaStreamVideoSink::RemoveFromVideoTrack(&sink, track);
+}
 } // namespace content
@@ -263,14 +263,4 @@ void MediaStreamVideoTrack::OnReadyStateChanged(
   }
 }
-void MediaStreamVideoTrack::SetMutedState(bool muted_state) {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  muted_state_ = muted_state;
-}
-bool MediaStreamVideoTrack::GetMutedState(void) const {
-  DCHECK(thread_checker_.CalledOnValidThread());
-  return muted_state_;
-}
 } // namespace content
@@ -50,8 +50,6 @@ class CONTENT_EXPORT MediaStreamVideoTrack : public MediaStreamTrack {
   virtual ~MediaStreamVideoTrack();
   virtual void SetEnabled(bool enabled) OVERRIDE;
-  virtual void SetMutedState(bool state) OVERRIDE;
-  virtual bool GetMutedState(void) const OVERRIDE;
   virtual void Stop() OVERRIDE;
...
@@ -44,7 +44,7 @@ class MockMediaStreamVideoSource : public MediaStreamVideoSource {
   int max_requested_width() const { return max_requested_width_; }
   double max_requested_frame_rate() const { return max_requested_frame_rate_; }
-  void SetMutedState(bool muted_state) {
+  virtual void SetMutedState(bool muted_state) OVERRIDE {
     MediaStreamVideoSource::SetMutedState(muted_state);
     DoSetMutedState(muted_state);
   }
...
@@ -12,6 +12,7 @@
 #include "base/debug/trace_event.h"
 #include "base/location.h"
 #include "base/metrics/histogram.h"
+#include "media/base/bind_to_current_loop.h"
 #include "media/base/video_util.h"
 namespace content {
@@ -233,8 +234,9 @@ bool VideoTrackAdapter::VideoFrameResolutionAdapter::MaybeDropFrame(
   // frame rate is known and is lower than max.
   if (max_frame_rate_ == 0.0f ||
       (source_frame_rate > 0 &&
-       source_frame_rate <= max_frame_rate_))
+       source_frame_rate <= max_frame_rate_)) {
     return false;
+  }
   base::TimeDelta delta = frame->timestamp() - last_time_stamp_;
   if (delta.InMilliseconds() < kMinTimeInMsBetweenFrames) {
@@ -334,6 +336,8 @@ VideoTrackAdapter::VideoTrackAdapter(
     const scoped_refptr<base::MessageLoopProxy>& io_message_loop)
     : io_message_loop_(io_message_loop),
      renderer_task_runner_(base::MessageLoopProxy::current()),
+      monitoring_frame_rate_(false),
+      muted_state_(false),
       frame_counter_(0),
       source_frame_rate_(0.0f) {
   DCHECK(io_message_loop_.get());
@@ -352,16 +356,9 @@ void VideoTrackAdapter::AddTrack(
     int max_height,
     double min_aspect_ratio,
     double max_aspect_ratio,
-    double max_frame_rate,
-    double source_frame_rate,
-    const OnMutedCallback& on_muted_state_callback) {
+    double max_frame_rate) {
   DCHECK(thread_checker_.CalledOnValidThread());
-  // Track monitoring should be scheduled before AddTrackOnIO() so it can find
-  // |adapters_| empty.
-  io_message_loop_->PostTask(
-      FROM_HERE,
-      base::Bind(&VideoTrackAdapter::StartTrackMonitoringOnIO,
-                 this, on_muted_state_callback, source_frame_rate));
   io_message_loop_->PostTask(
       FROM_HERE,
       base::Bind(&VideoTrackAdapter::AddTrackOnIO,
@@ -405,13 +402,28 @@ void VideoTrackAdapter::RemoveTrack(const MediaStreamVideoTrack* track) {
       base::Bind(&VideoTrackAdapter::RemoveTrackOnIO, this, track));
 }
-void VideoTrackAdapter::StartTrackMonitoringOnIO(
-    const OnMutedCallback& on_muted_state_callback,
+void VideoTrackAdapter::StartFrameMonitoring(
+    double source_frame_rate,
+    const OnMutedCallback& on_muted_callback) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  VideoTrackAdapter::OnMutedCallback bound_on_muted_callback =
+      media::BindToCurrentLoop(on_muted_callback);
+  io_message_loop_->PostTask(
+      FROM_HERE,
+      base::Bind(&VideoTrackAdapter::StartFrameMonitoringOnIO,
+                 this, bound_on_muted_callback, source_frame_rate));
+}
+void VideoTrackAdapter::StartFrameMonitoringOnIO(
+    const OnMutedCallback& on_muted_callback,
     double source_frame_rate) {
   DCHECK(io_message_loop_->BelongsToCurrentThread());
-  // Only trigger monitoring for the first Track.
-  if (!adapters_.empty())
-    return;
+  DCHECK(!monitoring_frame_rate_);
+  monitoring_frame_rate_ = true;
   // If the source does not know the frame rate, set one by default.
   if (source_frame_rate == 0.0f)
     source_frame_rate = MediaStreamVideoSource::kDefaultFrameRate;
@@ -420,11 +432,23 @@ void VideoTrackAdapter::StartTrackMonitoringOnIO(
            << (kFirstFrameTimeoutInFrameIntervals / source_frame_rate_) << "s";
   io_message_loop_->PostDelayedTask(FROM_HERE,
       base::Bind(&VideoTrackAdapter::CheckFramesReceivedOnIO, this,
-                 on_muted_state_callback, frame_counter_),
+                 on_muted_callback, frame_counter_),
       base::TimeDelta::FromSecondsD(kFirstFrameTimeoutInFrameIntervals /
                                     source_frame_rate_));
 }
+void VideoTrackAdapter::StopFrameMonitoring() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  io_message_loop_->PostTask(
+      FROM_HERE,
+      base::Bind(&VideoTrackAdapter::StopFrameMonitoringOnIO, this));
+}
+void VideoTrackAdapter::StopFrameMonitoringOnIO() {
+  DCHECK(io_message_loop_->BelongsToCurrentThread());
+  monitoring_frame_rate_ = false;
+}
 void VideoTrackAdapter::RemoveTrackOnIO(const MediaStreamVideoTrack* track) {
   DCHECK(io_message_loop_->BelongsToCurrentThread());
   for (FrameAdapters::iterator it = adapters_.begin();
@@ -454,12 +478,19 @@ void VideoTrackAdapter::CheckFramesReceivedOnIO(
     const OnMutedCallback& set_muted_state_callback,
     uint64 old_frame_counter_snapshot) {
   DCHECK(io_message_loop_->BelongsToCurrentThread());
+  if (!monitoring_frame_rate_)
+    return;
   DVLOG_IF(1, old_frame_counter_snapshot == frame_counter_)
       << "No frames have passed, setting source as Muted.";
-  set_muted_state_callback.Run(old_frame_counter_snapshot == frame_counter_);
-  // Rearm the monitoring while there are active Tracks, i.e. as long as the
-  // owner MediaStreamSource is active.
+  bool muted_state = old_frame_counter_snapshot == frame_counter_;
+  if (muted_state_ != muted_state) {
+    set_muted_state_callback.Run(muted_state);
+    muted_state_ = muted_state;
+  }
   io_message_loop_->PostDelayedTask(FROM_HERE,
       base::Bind(&VideoTrackAdapter::CheckFramesReceivedOnIO, this,
                  set_muted_state_callback, frame_counter_),
...
@@ -43,9 +43,7 @@ class VideoTrackAdapter
                 int max_width, int max_height,
                 double min_aspect_ratio,
                 double max_aspect_ratio,
-                double max_frame_rate,
-                double source_frame_rate,
-                const OnMutedCallback& on_muted_state_callback);
+                double max_frame_rate);
   void RemoveTrack(const MediaStreamVideoTrack* track);
   // Delivers |frame| to all tracks that have registered a callback.
@@ -60,6 +58,13 @@ class VideoTrackAdapter
     return io_message_loop_;
   }
+  // Start monitor that frames are delivered to this object. I.E, that
+  // |DeliverFrameOnIO| is called with a frame rate of |source_frame_rate|.
+  // |on_muted_callback| is triggered on the main render thread.
+  void StartFrameMonitoring(double source_frame_rate,
+                            const OnMutedCallback& on_muted_callback);
+  void StopFrameMonitoring();
 private:
  virtual ~VideoTrackAdapter();
  friend class base::RefCountedThreadSafe<VideoTrackAdapter>;
@@ -73,9 +78,10 @@ class VideoTrackAdapter
                       double max_frame_rate);
   void RemoveTrackOnIO(const MediaStreamVideoTrack* track);
-  void StartTrackMonitoringOnIO(
+  void StartFrameMonitoringOnIO(
       const OnMutedCallback& on_muted_state_callback,
       double source_frame_rate);
+  void StopFrameMonitoringOnIO();
   // Compare |frame_counter_snapshot| with the current |frame_counter_|, and
   // inform of the situation (muted, not muted) via |set_muted_state_callback|.
@@ -99,6 +105,14 @@ class VideoTrackAdapter
       FrameAdapters;
   FrameAdapters adapters_;
+  // Set to true if frame monitoring has been started. It is only accessed on
+  // the IO-thread.
+  bool monitoring_frame_rate_;
+  // Keeps track of it frames have been received. It is only accessed on the
+  // IO-thread.
+  bool muted_state_;
   // Running frame counter, accessed on the IO-thread.
   uint64 frame_counter_;
...