Commit 873da263 authored by acolwell@chromium.org

Move VideoDecoder initialization into VideoRendererBase to simplify implementing codec config changes during playback.

BUG=141533
TEST=Existing PipelineTest.*, VideoRendererBaseTest.*

Review URL: https://chromiumcodereview.appspot.com/10836167

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@151132 0039d316-1c4b-4281-b951-d872f2087c98
parent e516a3c1
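The diffs below remove the Pipeline's kInitVideoDecoder stage and instead hand the demuxer stream plus the whole decoder list to the video renderer, which walks the list itself and falls back to the next decoder when one reports DECODER_ERROR_NOT_SUPPORTED. As a rough standalone sketch of that selection pattern (FakeDecoder and SelectDecoder are illustrative names, not part of this change; the real code is asynchronous and callback-driven):

#include <iostream>
#include <list>
#include <memory>
#include <string>

// Illustrative stand-in for media::VideoDecoder: Initialize() either succeeds
// or reports that the stream's configuration is not supported.
struct FakeDecoder {
  std::string name;
  bool supported;
  bool Initialize() { return supported; }
};

// Analogue of VideoRendererBase::InitializeNextDecoder()/OnDecoderInitDone():
// try decoders in FIFO order and keep the first one that initializes.
std::shared_ptr<FakeDecoder> SelectDecoder(
    std::list<std::shared_ptr<FakeDecoder>> decoders) {
  while (!decoders.empty()) {
    std::shared_ptr<FakeDecoder> decoder = decoders.front();
    decoders.pop_front();
    if (decoder->Initialize())
      return decoder;  // First decoder that accepts the stream wins.
    // Not supported: fall through and try the next decoder in the list.
  }
  return nullptr;  // No decoder could handle the stream.
}

int main() {
  std::list<std::shared_ptr<FakeDecoder>> decoders;
  decoders.push_back(std::make_shared<FakeDecoder>(FakeDecoder{"gpu", false}));
  decoders.push_back(std::make_shared<FakeDecoder>(FakeDecoder{"ffmpeg", true}));
  std::shared_ptr<FakeDecoder> chosen = SelectDecoder(decoders);
  std::cout << (chosen ? chosen->name : "none") << "\n";  // Prints "ffmpeg".
  return 0;
}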
@@ -2434,7 +2434,7 @@ WebMediaPlayer* RenderViewImpl::createMediaPlayer(
GpuChannelHost* gpu_channel_host =
RenderThreadImpl::current()->EstablishGpuChannelSync(
content::CAUSE_FOR_GPU_LAUNCH_VIDEODECODEACCELERATOR_INITIALIZE);
- collection->AddVideoDecoder(new media::GpuVideoDecoder(
+ collection->GetVideoDecoders()->push_back(new media::GpuVideoDecoder(
message_loop_factory->GetMessageLoop("GpuVideoDecoder"),
factories_loop,
new RendererGpuVideoDecoderFactories(
...
@@ -29,10 +29,6 @@ void FilterCollection::AddAudioDecoder(AudioDecoder* audio_decoder) {
audio_decoders_.push_back(audio_decoder);
}
- void FilterCollection::AddVideoDecoder(VideoDecoder* video_decoder) {
- video_decoders_.push_back(video_decoder);
- }
void FilterCollection::AddAudioRenderer(AudioRenderer* audio_renderer) {
audio_renderers_.push_back(audio_renderer);
}
@@ -41,13 +37,6 @@ void FilterCollection::AddVideoRenderer(VideoRenderer* video_renderer) {
video_renderers_.push_back(video_renderer);
}
- bool FilterCollection::IsEmpty() const {
- return audio_decoders_.empty() &&
- video_decoders_.empty() &&
- audio_renderers_.empty() &&
- video_renderers_.empty();
- }
void FilterCollection::Clear() {
audio_decoders_.clear();
video_decoders_.clear();
@@ -64,15 +53,6 @@ void FilterCollection::SelectAudioDecoder(scoped_refptr<AudioDecoder>* out) {
audio_decoders_.pop_front();
}
- void FilterCollection::SelectVideoDecoder(scoped_refptr<VideoDecoder>* out) {
- if (video_decoders_.empty()) {
- *out = NULL;
- return;
- }
- *out = video_decoders_.front();
- video_decoders_.pop_front();
- }
void FilterCollection::SelectAudioRenderer(scoped_refptr<AudioRenderer>* out) {
if (audio_renderers_.empty()) {
*out = NULL;
@@ -91,4 +71,9 @@ void FilterCollection::SelectVideoRenderer(scoped_refptr<VideoRenderer>* out) {
video_renderers_.pop_front();
}
+ FilterCollection::VideoDecoderList*
+ FilterCollection::GetVideoDecoders() {
+ return &video_decoders_;
+ }
} // namespace media
@@ -25,6 +25,8 @@ class VideoRenderer;
// http://crbug.com/110800
class MEDIA_EXPORT FilterCollection {
public:
+ typedef std::list<scoped_refptr<VideoDecoder> > VideoDecoderList;
FilterCollection();
~FilterCollection();
@@ -34,13 +36,9 @@ class MEDIA_EXPORT FilterCollection {
// Adds a filter to the collection.
void AddAudioDecoder(AudioDecoder* audio_decoder);
- void AddVideoDecoder(VideoDecoder* video_decoder);
void AddAudioRenderer(AudioRenderer* audio_renderer);
void AddVideoRenderer(VideoRenderer* video_renderer);
- // Is the collection empty?
- bool IsEmpty() const;
// Remove remaining filters.
void Clear();
@@ -49,14 +47,15 @@ class MEDIA_EXPORT FilterCollection {
// If a filter is returned it is removed from the collection.
// Filters are selected in FIFO order.
void SelectAudioDecoder(scoped_refptr<AudioDecoder>* out);
- void SelectVideoDecoder(scoped_refptr<VideoDecoder>* out);
void SelectAudioRenderer(scoped_refptr<AudioRenderer>* out);
void SelectVideoRenderer(scoped_refptr<VideoRenderer>* out);
+ VideoDecoderList* GetVideoDecoders();
private:
scoped_refptr<Demuxer> demuxer_;
std::list<scoped_refptr<AudioDecoder> > audio_decoders_;
- std::list<scoped_refptr<VideoDecoder> > video_decoders_;
+ VideoDecoderList video_decoders_;
std::list<scoped_refptr<AudioRenderer> > audio_renderers_;
std::list<scoped_refptr<VideoRenderer> > video_renderers_;
...
@@ -20,54 +20,22 @@ class FilterCollectionTest : public ::testing::Test {
DISALLOW_COPY_AND_ASSIGN(FilterCollectionTest);
};
- TEST_F(FilterCollectionTest, TestIsEmptyAndClear) {
- EXPECT_TRUE(collection_.IsEmpty());
- collection_.AddAudioDecoder(mock_filters_.audio_decoder());
- EXPECT_FALSE(collection_.IsEmpty());
- collection_.Clear();
- EXPECT_TRUE(collection_.IsEmpty());
- }
TEST_F(FilterCollectionTest, SelectXXXMethods) {
scoped_refptr<AudioDecoder> audio_decoder;
- scoped_refptr<VideoDecoder> video_decoder;
- collection_.AddVideoDecoder(mock_filters_.video_decoder());
- EXPECT_FALSE(collection_.IsEmpty());
- // Verify that the video decoder will not be returned if we
- // ask for a different type.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_FALSE(audio_decoder);
- EXPECT_FALSE(collection_.IsEmpty());
- // Verify that we can actually retrieve the video decoder
- // and that it is removed from the collection.
- collection_.SelectVideoDecoder(&video_decoder);
- EXPECT_TRUE(video_decoder);
- EXPECT_TRUE(collection_.IsEmpty());
- // Add a video decoder and audio decoder.
- collection_.AddVideoDecoder(mock_filters_.video_decoder());
+ // Add an audio decoder.
collection_.AddAudioDecoder(mock_filters_.audio_decoder());
// Verify that we can select the audio decoder.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
- EXPECT_FALSE(collection_.IsEmpty());
// Verify that we can't select it again since only one has been added.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_FALSE(audio_decoder);
- // Verify that we can select the video decoder and that doing so will
- // empty the collection again.
- collection_.SelectVideoDecoder(&video_decoder);
- EXPECT_TRUE(collection_.IsEmpty());
}
TEST_F(FilterCollectionTest, MultipleFiltersOfSameType) {
@@ -83,13 +51,11 @@ TEST_F(FilterCollectionTest, MultipleFiltersOfSameType) {
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
EXPECT_EQ(audio_decoder, audio_decoder_a);
- EXPECT_FALSE(collection_.IsEmpty());
// Verify that second SelectAudioDecoder() returns audio_decoder_b.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
EXPECT_EQ(audio_decoder, audio_decoder_b);
- EXPECT_TRUE(collection_.IsEmpty());
// Verify that third SelectAudioDecoder() returns nothing.
collection_.SelectAudioDecoder(&audio_decoder);
...
@@ -69,8 +69,6 @@ const char* MediaLog::PipelineStateToString(Pipeline::State state) {
return "initAudioDecoder";
case Pipeline::kInitAudioRenderer:
return "initAudioRenderer";
- case Pipeline::kInitVideoDecoder:
- return "initVideoDecoder";
case Pipeline::kInitVideoRenderer:
return "initVideoRenderer";
case Pipeline::kPausing:
...
@@ -77,7 +77,7 @@ MockFilterCollection::~MockFilterCollection() {}
scoped_ptr<FilterCollection> MockFilterCollection::Create() {
scoped_ptr<FilterCollection> collection(new FilterCollection());
collection->SetDemuxer(demuxer_);
- collection->AddVideoDecoder(video_decoder_);
+ collection->GetVideoDecoders()->push_back(video_decoder_);
collection->AddAudioDecoder(audio_decoder_);
collection->AddVideoRenderer(video_renderer_);
collection->AddAudioRenderer(audio_renderer_);
...
@@ -137,21 +137,23 @@ class MockVideoRenderer : public VideoRenderer {
MockVideoRenderer();
// VideoRenderer implementation.
- MOCK_METHOD9(Initialize, void(const scoped_refptr<VideoDecoder>& decoder,
- const PipelineStatusCB& init_cb,
- const StatisticsCB& statistics_cb,
- const TimeCB& time_cb,
- const NaturalSizeChangedCB& size_changed_cb,
- const base::Closure& ended_cb,
- const PipelineStatusCB& error_cb,
- const TimeDeltaCB& get_time_cb,
- const TimeDeltaCB& get_duration_cb));
+ MOCK_METHOD10(Initialize, void(const scoped_refptr<DemuxerStream>& stream,
+ const VideoDecoderList& decoders,
+ const PipelineStatusCB& init_cb,
+ const StatisticsCB& statistics_cb,
+ const TimeCB& time_cb,
+ const NaturalSizeChangedCB& size_changed_cb,
+ const base::Closure& ended_cb,
+ const PipelineStatusCB& error_cb,
+ const TimeDeltaCB& get_time_cb,
+ const TimeDeltaCB& get_duration_cb));
MOCK_METHOD1(Play, void(const base::Closure& callback));
MOCK_METHOD1(Pause, void(const base::Closure& callback));
MOCK_METHOD1(Flush, void(const base::Closure& callback));
MOCK_METHOD2(Preroll, void(base::TimeDelta time, const PipelineStatusCB& cb));
MOCK_METHOD1(Stop, void(const base::Closure& callback));
MOCK_METHOD1(SetPlaybackRate, void(float playback_rate));
+ MOCK_METHOD0(PrepareForShutdownHack, void());
protected:
virtual ~MockVideoRenderer();
...
@@ -63,7 +63,6 @@ media::PipelineStatus PipelineStatusNotification::status() {
struct Pipeline::PipelineInitState {
scoped_refptr<AudioDecoder> audio_decoder;
- scoped_refptr<VideoDecoder> video_decoder;
};
Pipeline::Pipeline(MessageLoop* message_loop, MediaLog* media_log)
@@ -584,14 +583,8 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
DCHECK(message_loop_->BelongsToCurrentThread());
if (last_stage_status != PIPELINE_OK) {
- // Currently only VideoDecoders have a recoverable error code.
- if (state_ == kInitVideoDecoder &&
- last_stage_status == DECODER_ERROR_NOT_SUPPORTED) {
- state_ = kInitAudioRenderer;
- } else {
- SetError(last_stage_status);
- return;
- }
+ SetError(last_stage_status);
+ return;
}
// If we have received the stop or error signal, return immediately.
@@ -601,7 +594,6 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
DCHECK(state_ == kInitDemuxer ||
state_ == kInitAudioDecoder ||
state_ == kInitAudioRenderer ||
- state_ == kInitVideoDecoder ||
state_ == kInitVideoRenderer);
// Demuxer created, create audio decoder.
@@ -624,18 +616,10 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
}
}
- // Assuming audio renderer was created, create video decoder.
+ // Assuming audio renderer was created, create video renderer.
if (state_ == kInitAudioRenderer) {
- // Then perform the stage of initialization, i.e. initialize video decoder.
- SetState(kInitVideoDecoder);
- if (InitializeVideoDecoder(demuxer_))
- return;
- }
- // Assuming video decoder was created, create video renderer.
- if (state_ == kInitVideoDecoder) {
SetState(kInitVideoRenderer);
- if (InitializeVideoRenderer(pipeline_init_state_->video_decoder)) {
+ if (InitializeVideoRenderer(demuxer_->GetStream(DemuxerStream::VIDEO))) {
base::AutoLock auto_lock(lock_);
has_video_ = true;
return;
@@ -683,10 +667,8 @@ void Pipeline::StopTask(const base::Closure& stop_cb) {
return;
}
- if (video_decoder_) {
- video_decoder_->PrepareForShutdownHack();
- video_decoder_ = NULL;
- }
+ if (video_renderer_)
+ video_renderer_->PrepareForShutdownHack();
if (tearing_down_ && status_ != PIPELINE_OK) {
// If we are stopping due to SetError(), stop normally instead of
@@ -970,7 +952,6 @@ void Pipeline::TeardownStateTransitionTask() {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
- case kInitVideoDecoder:
case kInitVideoRenderer:
case kSeeking:
case kStarting:
@@ -1064,34 +1045,6 @@ bool Pipeline::InitializeAudioDecoder(
return true;
}
- bool Pipeline::InitializeVideoDecoder(
- const scoped_refptr<Demuxer>& demuxer) {
- DCHECK(message_loop_->BelongsToCurrentThread());
- DCHECK(IsPipelineOk());
- DCHECK(demuxer);
- scoped_refptr<DemuxerStream> stream =
- demuxer->GetStream(DemuxerStream::VIDEO);
- if (!stream)
- return false;
- filter_collection_->SelectVideoDecoder(&pipeline_init_state_->video_decoder);
- if (!pipeline_init_state_->video_decoder) {
- SetError(PIPELINE_ERROR_REQUIRED_FILTER_MISSING);
- return false;
- }
- pipeline_init_state_->video_decoder->Initialize(
- stream,
- base::Bind(&Pipeline::OnFilterInitialize, this),
- base::Bind(&Pipeline::OnUpdateStatistics, this));
- video_decoder_ = pipeline_init_state_->video_decoder;
- return true;
- }
bool Pipeline::InitializeAudioRenderer(
const scoped_refptr<AudioDecoder>& decoder) {
DCHECK(message_loop_->BelongsToCurrentThread());
@@ -1118,11 +1071,11 @@ bool Pipeline::InitializeAudioRenderer(
}
bool Pipeline::InitializeVideoRenderer(
- const scoped_refptr<VideoDecoder>& decoder) {
+ const scoped_refptr<DemuxerStream>& stream) {
DCHECK(message_loop_->BelongsToCurrentThread());
DCHECK(IsPipelineOk());
- if (!decoder)
+ if (!stream)
return false;
filter_collection_->SelectVideoRenderer(&video_renderer_);
@@ -1132,7 +1085,8 @@ bool Pipeline::InitializeVideoRenderer(
}
video_renderer_->Initialize(
- decoder,
+ stream,
+ *filter_collection_->GetVideoDecoders(),
base::Bind(&Pipeline::OnFilterInitialize, this),
base::Bind(&Pipeline::OnUpdateStatistics, this),
base::Bind(&Pipeline::OnVideoTimeUpdate, this),
@@ -1141,6 +1095,7 @@ bool Pipeline::InitializeVideoRenderer(
base::Bind(&Pipeline::SetError, this),
base::Bind(&Pipeline::GetMediaTime, this),
base::Bind(&Pipeline::GetMediaDuration, this));
+ filter_collection_->GetVideoDecoders()->clear();
return true;
}
@@ -1174,7 +1129,6 @@ void Pipeline::TearDownPipeline() {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
- case kInitVideoDecoder:
case kInitVideoRenderer:
// Make it look like initialization was successful.
filter_collection_.reset();
...
@@ -31,7 +31,6 @@ class AudioDecoder;
class Clock;
class FilterCollection;
class MediaLog;
- class VideoDecoder;
class VideoRenderer;
// Adapter for using asynchronous Pipeline methods in code that wants to run
@@ -214,7 +213,6 @@ class MEDIA_EXPORT Pipeline
kInitDemuxer,
kInitAudioDecoder,
kInitAudioRenderer,
- kInitVideoDecoder,
kInitVideoRenderer,
kPausing,
kSeeking,
@@ -352,16 +350,16 @@ class MEDIA_EXPORT Pipeline
// Returns true if the asynchronous action of creating decoder has started.
// Returns false if this method did nothing because the corresponding
- // audio/video stream does not exist.
+ // audio stream does not exist.
bool InitializeAudioDecoder(const scoped_refptr<Demuxer>& demuxer);
- bool InitializeVideoDecoder(const scoped_refptr<Demuxer>& demuxer);
// Initializes a renderer and connects it with decoder. Returns true if the
// asynchronous action of creating renderer has started. Returns
// false if this method did nothing because the corresponding audio/video
// stream does not exist.
bool InitializeAudioRenderer(const scoped_refptr<AudioDecoder>& decoder);
- bool InitializeVideoRenderer(const scoped_refptr<VideoDecoder>& decoder);
+ bool InitializeVideoRenderer(
+ const scoped_refptr<DemuxerStream>& stream);
// Kicks off destroying filters. Called by StopTask() and ErrorChangedTask().
// When we start to tear down the pipeline, we will consider two cases:
@@ -496,17 +494,16 @@ class MEDIA_EXPORT Pipeline
PipelineStatusCB ended_cb_;
PipelineStatusCB error_cb_;
- // Decoder reference used for signalling imminent shutdown.
- // This is a HACK necessary because WebMediaPlayerImpl::Destroy() holds the
- // renderer thread loop hostage for until PipelineImpl::Stop() calls its
- // callback.
- // This reference should only be used for this hack and no other purposes.
- // http://crbug.com/110228 tracks removing this hack.
- scoped_refptr<VideoDecoder> video_decoder_;
- // Renderer references used for setting the volume and determining
+ // Audio renderer reference used for setting the volume and determining
// when playback has finished.
scoped_refptr<AudioRenderer> audio_renderer_;
+ // Video Renderer reference used for determining when playback has finished
+ // and for signalling imminent shutdown.
+ // The signalling imminent shutdown is a HACK necessary because
+ // WebMediaPlayerImpl::Destroy() holds the render thread loop hostage
+ // until PipelineImpl::Stop() calls its callback.
+ // http://crbug.com/110228 tracks removing this hack.
scoped_refptr<VideoRenderer> video_renderer_;
// Demuxer reference used for setting the preload value.
...
@@ -54,10 +54,18 @@ ACTION(RunPipelineStatusCB) {
arg1.Run(PIPELINE_OK);
}
+ ACTION(RunPipelineStatusCB2) {
+ arg2.Run(PIPELINE_OK);
+ }
ACTION_P(RunPipelineStatusCBWithStatus, status) {
arg1.Run(status);
}
+ ACTION_P(RunPipelineStatusCB2WithStatus, status) {
+ arg2.Run(status);
+ }
// Used for setting expectations on pipeline callbacks. Using a StrictMock
// also lets us test for missing callbacks.
class CallbackHelper {
@@ -164,13 +172,6 @@ class PipelineTest : public ::testing::Test {
return stream;
}
- // Sets up expectations to allow the video decoder to initialize.
- void InitializeVideoDecoder(const scoped_refptr<DemuxerStream>& stream) {
- EXPECT_CALL(*mocks_->video_decoder(),
- Initialize(stream, _, _))
- .WillOnce(RunPipelineStatusCB());
- }
// Sets up expectations to allow the audio decoder to initialize.
void InitializeAudioDecoder(const scoped_refptr<DemuxerStream>& stream) {
EXPECT_CALL(*mocks_->audio_decoder(), Initialize(stream, _, _))
@@ -178,11 +179,10 @@ class PipelineTest : public ::testing::Test {
}
// Sets up expectations to allow the video renderer to initialize.
- void InitializeVideoRenderer() {
+ void InitializeVideoRenderer(const scoped_refptr<DemuxerStream>& stream) {
EXPECT_CALL(*mocks_->video_renderer(), Initialize(
- scoped_refptr<VideoDecoder>(mocks_->video_decoder()),
- _, _, _, _, _, _, _, _))
- .WillOnce(RunPipelineStatusCB());
+ stream, _, _, _, _, _, _, _, _, _))
+ .WillOnce(RunPipelineStatusCB2());
EXPECT_CALL(*mocks_->video_renderer(), SetPlaybackRate(0.0f));
// Startup sequence.
@@ -419,8 +419,7 @@ TEST_F(PipelineTest, VideoStream) {
streams.push_back(video_stream());
InitializeDemuxer(&streams);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
@@ -437,8 +436,7 @@ TEST_F(PipelineTest, AudioVideoStream) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_TRUE(pipeline_->HasAudio());
@@ -455,8 +453,7 @@ TEST_F(PipelineTest, Seek) {
InitializeDemuxer(&streams, base::TimeDelta::FromSeconds(3000));
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
// Initialize then seek!
InitializePipeline(PIPELINE_OK);
@@ -492,8 +489,7 @@ TEST_F(PipelineTest, Properties) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_EQ(kDuration.ToInternalValue(),
@@ -509,8 +505,7 @@ TEST_F(PipelineTest, GetBufferedTimeRanges) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
@@ -564,8 +559,7 @@ TEST_F(PipelineTest, DisableAudioRenderer) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_TRUE(pipeline_->HasAudio());
@@ -589,8 +583,7 @@ TEST_F(PipelineTest, DisableAudioRendererDuringInit) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer(true);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
EXPECT_CALL(*mocks_->demuxer(),
OnAudioRendererDisabled());
@@ -614,8 +607,7 @@ TEST_F(PipelineTest, EndedCallback) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
// The ended callback shouldn't run until both renderers have ended.
@@ -649,8 +641,7 @@ TEST_F(PipelineTest, AudioStreamShorterThanVideo) {
InitializeDemuxer(&streams, duration);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_EQ(0, pipeline_->GetMediaTime().ToInternalValue());
@@ -786,8 +777,7 @@ TEST_F(PipelineTest, StartTimeIsZero) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
@@ -808,8 +798,7 @@ TEST_F(PipelineTest, StartTimeIsNonZero) {
streams.push_back(video_stream());
InitializeDemuxer(&streams, kDuration);
- InitializeVideoDecoder(video_stream());
- InitializeVideoRenderer();
+ InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
@@ -937,7 +926,6 @@ class PipelineTeardownTest : public PipelineTest {
kInitDemuxer,
kInitAudioDecoder,
kInitAudioRenderer,
- kInitVideoDecoder,
kInitVideoRenderer,
kPausing,
kFlushing,
@@ -960,7 +948,6 @@ class PipelineTeardownTest : public PipelineTest {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
- case kInitVideoDecoder:
case kInitVideoRenderer:
DoInitialize(state, stop_or_error);
break;
@@ -1080,36 +1067,17 @@ class PipelineTeardownTest : public PipelineTest {
EXPECT_CALL(*mocks_->audio_renderer(), Initialize(_, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB());
- if (state == kInitVideoDecoder) {
- if (stop_or_error == kStop) {
- EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
- .WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB()));
- EXPECT_CALL(callbacks_, OnStop());
- } else {
- status = PIPELINE_ERROR_DECODE;
- EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
- .WillOnce(RunPipelineStatusCBWithStatus(status));
- }
- EXPECT_CALL(*mocks_->demuxer(), Stop(_)).WillOnce(RunClosure());
- EXPECT_CALL(*mocks_->audio_renderer(), Stop(_)).WillOnce(RunClosure());
- return status;
- }
- EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
- .WillOnce(RunPipelineStatusCB());
if (state == kInitVideoRenderer) {
if (stop_or_error == kStop) {
EXPECT_CALL(*mocks_->video_renderer(),
- Initialize(_, _, _, _, _, _, _, _, _))
- .WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB()));
+ Initialize(_, _, _, _, _, _, _, _, _, _))
+ .WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB2()));
EXPECT_CALL(callbacks_, OnStop());
} else {
status = PIPELINE_ERROR_INITIALIZATION_FAILED;
EXPECT_CALL(*mocks_->video_renderer(),
- Initialize(_, _, _, _, _, _, _, _, _))
- .WillOnce(RunPipelineStatusCBWithStatus(status));
+ Initialize(_, _, _, _, _, _, _, _, _, _))
+ .WillOnce(RunPipelineStatusCB2WithStatus(status));
}
EXPECT_CALL(*mocks_->demuxer(), Stop(_)).WillOnce(RunClosure());
@@ -1119,8 +1087,8 @@ class PipelineTeardownTest : public PipelineTest {
}
EXPECT_CALL(*mocks_->video_renderer(),
- Initialize(_, _, _, _, _, _, _, _, _))
- .WillOnce(RunPipelineStatusCB());
+ Initialize(_, _, _, _, _, _, _, _, _, _))
+ .WillOnce(RunPipelineStatusCB2());
// If we get here it's a successful initialization.
EXPECT_CALL(*mocks_->demuxer(), SetPlaybackRate(0.0f));
@@ -1314,7 +1282,6 @@ class PipelineTeardownTest : public PipelineTest {
INSTANTIATE_TEARDOWN_TEST(Stop, InitDemuxer);
INSTANTIATE_TEARDOWN_TEST(Stop, InitAudioDecoder);
INSTANTIATE_TEARDOWN_TEST(Stop, InitAudioRenderer);
- INSTANTIATE_TEARDOWN_TEST(Stop, InitVideoDecoder);
INSTANTIATE_TEARDOWN_TEST(Stop, InitVideoRenderer);
INSTANTIATE_TEARDOWN_TEST(Stop, Pausing);
INSTANTIATE_TEARDOWN_TEST(Stop, Flushing);
@@ -1326,7 +1293,6 @@ INSTANTIATE_TEARDOWN_TEST(Stop, Playing);
INSTANTIATE_TEARDOWN_TEST(Error, InitDemuxer);
INSTANTIATE_TEARDOWN_TEST(Error, InitAudioDecoder);
INSTANTIATE_TEARDOWN_TEST(Error, InitAudioRenderer);
- INSTANTIATE_TEARDOWN_TEST(Error, InitVideoDecoder);
INSTANTIATE_TEARDOWN_TEST(Error, InitVideoRenderer);
INSTANTIATE_TEARDOWN_TEST(Error, Pausing);
INSTANTIATE_TEARDOWN_TEST(Error, Flushing);
...
@@ -5,6 +5,8 @@
#ifndef MEDIA_BASE_VIDEO_RENDERER_H_
#define MEDIA_BASE_VIDEO_RENDERER_H_
+ #include <list>
#include "base/callback.h"
#include "base/memory/ref_counted.h"
#include "base/time.h"
@@ -17,11 +19,14 @@ class Size;
namespace media {
+ class DemuxerStream;
class VideoDecoder;
class MEDIA_EXPORT VideoRenderer
: public base::RefCountedThreadSafe<VideoRenderer> {
public:
+ typedef std::list<scoped_refptr<VideoDecoder> > VideoDecoderList;
// Used to update the pipeline's clock time. The parameter is the time that
// the clock should not exceed.
typedef base::Callback<void(base::TimeDelta)> TimeCB;
@@ -50,7 +55,8 @@ class MEDIA_EXPORT VideoRenderer
// |get_time_cb| is used to query the current media playback time.
//
// |get_duration_cb| is used to query the media duration.
- virtual void Initialize(const scoped_refptr<VideoDecoder>& decoder,
+ virtual void Initialize(const scoped_refptr<DemuxerStream>& stream,
+ const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& time_cb,
@@ -85,6 +91,13 @@ class MEDIA_EXPORT VideoRenderer
// Updates the current playback rate.
virtual void SetPlaybackRate(float playback_rate) = 0;
+ // Prepare decoder for shutdown. This is a HACK needed because
+ // PipelineImpl::Stop() goes through a Pause/Flush/Stop dance to all its
+ // filters, waiting for each state transition to complete before starting the
+ // next, but WebMediaPlayerImpl::Destroy() holds the renderer loop hostage for
+ // the duration. http://crbug.com/110228 tracks removing this.
+ virtual void PrepareForShutdownHack() = 0;
protected:
friend class base::RefCountedThreadSafe<VideoRenderer>;
...
@@ -197,7 +197,8 @@ PipelineIntegrationTestBase::CreateFilterCollection(
base::Unretained(message_loop_factory_.get()),
"VideoDecoderThread"),
decryptor);
- collection->AddVideoDecoder(decoder);
+ collection->GetVideoDecoders()->push_back(decoder);
// Disable frame dropping if hashing is enabled.
renderer_ = new VideoRendererBase(
base::Bind(&PipelineIntegrationTestBase::OnVideoRendererPaint,
...
@@ -112,7 +112,8 @@ void VideoRendererBase::Preroll(base::TimeDelta time,
AttemptRead_Locked();
}
- void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
+ void VideoRendererBase::Initialize(const scoped_refptr<DemuxerStream>& stream,
+ const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& max_time_cb,
@@ -122,7 +123,9 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
const TimeDeltaCB& get_time_cb,
const TimeDeltaCB& get_duration_cb) {
base::AutoLock auto_lock(lock_);
- DCHECK(decoder);
+ DCHECK(stream);
+ DCHECK(!decoders.empty());
+ DCHECK_EQ(stream->type(), DemuxerStream::VIDEO);
DCHECK(!init_cb.is_null());
DCHECK(!statistics_cb.is_null());
DCHECK(!max_time_cb.is_null());
@@ -131,8 +134,8 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
DCHECK(!get_time_cb.is_null());
DCHECK(!get_duration_cb.is_null());
DCHECK_EQ(kUninitialized, state_);
- decoder_ = decoder;
+ init_cb_ = init_cb;
statistics_cb_ = statistics_cb;
max_time_cb_ = max_time_cb;
size_changed_cb_ = size_changed_cb;
@@ -141,20 +144,65 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
get_time_cb_ = get_time_cb;
get_duration_cb_ = get_duration_cb;
+ scoped_ptr<VideoDecoderList> decoder_list(new VideoDecoderList(decoders));
+ InitializeNextDecoder(stream, decoder_list.Pass());
+ }
+ void VideoRendererBase::InitializeNextDecoder(
+ const scoped_refptr<DemuxerStream>& demuxer_stream,
+ scoped_ptr<VideoDecoderList> decoders) {
+ lock_.AssertAcquired();
+ DCHECK(!decoders->empty());
+ scoped_refptr<VideoDecoder> decoder = decoders->front();
+ decoders->pop_front();
+ DCHECK(decoder);
+ decoder_ = decoder;
+ base::AutoUnlock auto_unlock(lock_);
+ decoder->Initialize(
+ demuxer_stream,
+ base::Bind(&VideoRendererBase::OnDecoderInitDone, this,
+ demuxer_stream,
+ base::Passed(&decoders)),
+ statistics_cb_);
+ }
+ void VideoRendererBase::OnDecoderInitDone(
+ const scoped_refptr<DemuxerStream>& demuxer_stream,
+ scoped_ptr<VideoDecoderList> decoders,
+ PipelineStatus status) {
+ base::AutoLock auto_lock(lock_);
+ if (state_ == kStopped)
+ return;
+ if (!decoders->empty() && status == DECODER_ERROR_NOT_SUPPORTED) {
+ InitializeNextDecoder(demuxer_stream, decoders.Pass());
+ return;
+ }
+ if (status != PIPELINE_OK) {
+ state_ = kError;
+ base::ResetAndReturn(&init_cb_).Run(status);
+ return;
+ }
// We're all good! Consider ourselves flushed. (ThreadMain() should never
// see us in the kUninitialized state).
// Since we had an initial Preroll(), we consider ourself flushed, because we
// have not populated any buffers yet.
state_ = kFlushed;
- set_opaque_cb_.Run(!decoder->HasAlpha());
+ set_opaque_cb_.Run(!decoder_->HasAlpha());
set_opaque_cb_.Reset();
// Create our video thread.
if (!base::PlatformThread::Create(0, this, &thread_)) {
NOTREACHED() << "Video thread creation failed";
state_ = kError;
- init_cb.Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
+ base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
@@ -163,7 +211,13 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
// TODO(scherkus): find out if this is necessary, but it seems to help.
::SetThreadPriority(thread_, THREAD_PRIORITY_ABOVE_NORMAL);
#endif // defined(OS_WIN)
- init_cb.Run(PIPELINE_OK);
+ base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
+ }
+ void VideoRendererBase::PrepareForShutdownHack() {
+ base::AutoLock auto_lock(lock_);
+ if (decoder_)
+ decoder_->PrepareForShutdownHack();
}
// PlatformThread::Delegate implementation.
...
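Because initialization can now span several asynchronous decoder attempts, the renderer stores the completion callback in init_cb_ and fires it exactly once via base::ResetAndReturn(). The same reset-then-run idiom in a standalone form, with std::function standing in for the Chromium callback types (AsyncInitializer is an illustrative name, not part of this change):

#include <functional>
#include <iostream>
#include <utility>

// Illustrative stand-in for a class that stores a one-shot completion
// callback and runs it exactly once: the stored callback is moved out and
// cleared before it is invoked, so a re-entrant or repeated completion
// cannot fire it a second time (the role base::ResetAndReturn plays).
class AsyncInitializer {
 public:
  void Start(std::function<void(bool)> done_cb) { done_cb_ = std::move(done_cb); }

  void OnStepDone(bool ok) {
    if (!done_cb_)
      return;  // Result already reported.
    std::function<void(bool)> cb = std::move(done_cb_);
    done_cb_ = nullptr;  // Reset first...
    cb(ok);              // ...then run the callback with the result.
  }

 private:
  std::function<void(bool)> done_cb_;
};

int main() {
  AsyncInitializer init;
  init.Start([](bool ok) { std::cout << (ok ? "ok" : "failed") << "\n"; });
  init.OnStepDone(true);   // Prints "ok".
  init.OnStepDone(false);  // No-op: the callback was already consumed.
  return 0;
}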
@@ -11,6 +11,7 @@
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/threading/platform_thread.h"
+ #include "media/base/demuxer_stream.h"
#include "media/base/pipeline_status.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
@@ -50,7 +51,8 @@ class MEDIA_EXPORT VideoRendererBase
bool drop_frames);
// VideoRenderer implementation.
- virtual void Initialize(const scoped_refptr<VideoDecoder>& decoder,
+ virtual void Initialize(const scoped_refptr<DemuxerStream>& stream,
+ const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& max_time_cb,
@@ -66,6 +68,7 @@ class MEDIA_EXPORT VideoRendererBase
const PipelineStatusCB& cb) OVERRIDE;
virtual void Stop(const base::Closure& callback) OVERRIDE;
virtual void SetPlaybackRate(float playback_rate) OVERRIDE;
+ virtual void PrepareForShutdownHack() OVERRIDE;
// PlatformThread::Delegate implementation.
virtual void ThreadMain() OVERRIDE;
@@ -119,6 +122,18 @@ class MEDIA_EXPORT VideoRendererBase
// |size_changed_cb_| if the natural size changes.
void SetCurrentFrameToNextReadyFrame();
+ // Pops the front of |decoders|, assigns it to |decoder_| and then
+ // calls initialize on the new decoder.
+ void InitializeNextDecoder(const scoped_refptr<DemuxerStream>& demuxer_stream,
+ scoped_ptr<VideoDecoderList> decoders);
+ // Called when |decoder_| initialization completes.
+ // |demuxer_stream| & |decoders| are used if initialization failed and
+ // InitializeNextDecoder() needs to be called again.
+ void OnDecoderInitDone(const scoped_refptr<DemuxerStream>& demuxer_stream,
+ scoped_ptr<VideoDecoderList> decoders,
+ PipelineStatus status);
// Used for accessing data members.
base::Lock lock_;
@@ -210,6 +225,7 @@ class MEDIA_EXPORT VideoRendererBase
PipelineStatusCB preroll_cb_;
// Event callbacks.
+ PipelineStatusCB init_cb_;
StatisticsCB statistics_cb_;
TimeCB max_time_cb_;
NaturalSizeChangedCB size_changed_cb_;
...
@@ -34,10 +34,15 @@ static const int kVideoDuration = kFrameDuration * 100;
static const int kEndOfStream = -1;
static const gfx::Size kNaturalSize(16u, 16u);
+ ACTION_P(RunPipelineStatusCB1, status) {
+ arg1.Run(status);
+ }
class VideoRendererBaseTest : public ::testing::Test {
public:
VideoRendererBaseTest()
: decoder_(new MockVideoDecoder()),
+ demuxer_stream_(new MockDemuxerStream()),
cv_(&lock_),
event_(false, false),
timeout_(TestTimeouts::action_timeout()),
@@ -51,6 +56,9 @@ class VideoRendererBaseTest : public ::testing::Test {
base::Bind(&VideoRendererBaseTest::OnSetOpaque, base::Unretained(this)),
true);
+ EXPECT_CALL(*demuxer_stream_, type())
+ .WillRepeatedly(Return(DemuxerStream::VIDEO));
// We expect these to be called but we don't care how/when.
EXPECT_CALL(*decoder_, Stop(_))
.WillRepeatedly(RunClosure());
@@ -98,13 +106,29 @@ class VideoRendererBaseTest : public ::testing::Test {
InSequence s;
+ EXPECT_CALL(*decoder_, Initialize(_, _, _))
+ .WillOnce(RunPipelineStatusCB1(PIPELINE_OK));
// Set playback rate before anything else happens.
renderer_->SetPlaybackRate(1.0f);
// Initialize, we shouldn't have any reads.
+ InitializeRenderer(PIPELINE_OK);
+ // We expect the video size to be set.
+ EXPECT_CALL(*this, OnNaturalSizeChanged(kNaturalSize));
+ // Start prerolling.
+ Preroll(0);
+ }
+ void InitializeRenderer(PipelineStatus expected_status) {
+ VideoRendererBase::VideoDecoderList decoders;
+ decoders.push_back(decoder_);
renderer_->Initialize(
- decoder_,
- NewExpectedStatusCB(PIPELINE_OK),
+ demuxer_stream_,
+ decoders,
+ NewExpectedStatusCB(expected_status),
base::Bind(&MockStatisticsCB::OnStatistics,
base::Unretained(&statistics_cb_object_)),
base::Bind(&VideoRendererBaseTest::OnTimeUpdate,
@@ -116,12 +140,6 @@ class VideoRendererBaseTest : public ::testing::Test {
base::Bind(&VideoRendererBaseTest::GetTime, base::Unretained(this)),
base::Bind(&VideoRendererBaseTest::GetDuration,
base::Unretained(this)));
- // We expect the video size to be set.
- EXPECT_CALL(*this, OnNaturalSizeChanged(kNaturalSize));
- // Start prerolling.
- Preroll(0);
}
// Instead of immediately satisfying a decoder Read request, queue it up.
@@ -311,6 +329,7 @@ class VideoRendererBaseTest : public ::testing::Test {
// Fixture members.
scoped_refptr<VideoRendererBase> renderer_;
scoped_refptr<MockVideoDecoder> decoder_;
+ scoped_refptr<MockDemuxerStream> demuxer_stream_;
MockStatisticsCB statistics_cb_object_;
// Receives all the buffers that renderer had provided to |decoder_|.
@@ -671,4 +690,13 @@ TEST_F(VideoRendererBaseTest, AbortPendingRead_Preroll) {
Shutdown();
}
+ TEST_F(VideoRendererBaseTest, VideoDecoder_InitFailure) {
+ InSequence s;
+ EXPECT_CALL(*decoder_, Initialize(_, _, _))
+ .WillOnce(RunPipelineStatusCB1(PIPELINE_ERROR_DECODE));
+ InitializeRenderer(PIPELINE_ERROR_DECODE);
+ }
} // namespace media
@@ -77,7 +77,7 @@ bool Movie::Open(const wchar_t* url, VideoRendererBase* video_renderer) {
base::Bind(&MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory_.get()),
"AudioDecoderThread")));
- collection->AddVideoDecoder(new FFmpegVideoDecoder(
+ collection->GetVideoDecoders()->push_back(new FFmpegVideoDecoder(
base::Bind(&MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory_.get()),
"VideoDecoderThread"),
...
@@ -119,7 +119,7 @@ bool InitPipeline(MessageLoop* message_loop,
base::Bind(&media::MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory),
"AudioDecoderThread")));
- collection->AddVideoDecoder(new media::FFmpegVideoDecoder(
+ collection->GetVideoDecoders()->push_back(new media::FFmpegVideoDecoder(
base::Bind(&media::MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory),
"VideoDecoderThread"),
...
@@ -33,7 +33,7 @@ static void AddDefaultDecodersToCollection(
base::Unretained(message_loop_factory),
"VideoDecoderThread"),
decryptor);
- filter_collection->AddVideoDecoder(ffmpeg_video_decoder);
+ filter_collection->GetVideoDecoders()->push_back(ffmpeg_video_decoder);
}
bool BuildMediaStreamCollection(const WebKit::WebURL& url,
@@ -48,16 +48,13 @@ bool BuildMediaStreamCollection(const WebKit::WebURL& url,
if (!video_decoder)
return false;
+ // Remove all other decoders and just use the MediaStream one.
// Remove any "traditional" decoders (e.g. GpuVideoDecoder) from the
// collection.
// NOTE: http://crbug.com/110800 is about replacing this ad-hockery with
// something more designed.
- scoped_refptr<media::VideoDecoder> old_videodecoder;
- do {
- filter_collection->SelectVideoDecoder(&old_videodecoder);
- } while (old_videodecoder);
- filter_collection->AddVideoDecoder(video_decoder);
+ filter_collection->GetVideoDecoders()->clear();
+ filter_collection->GetVideoDecoders()->push_back(video_decoder);
filter_collection->SetDemuxer(new media::DummyDemuxer(true, false));
...