Commit 873da263 authored by acolwell@chromium.org

Move VideoDecoder initialization into VideoRendererBase to simplify...

Move VideoDecoder initialization into VideoRendererBase to simplify implementing codec config changes during playback.

BUG=141533
TEST=Existing PipelineTest.*, VideoRendererBaseTest.*

Review URL: https://chromiumcodereview.appspot.com/10836167

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@151132 0039d316-1c4b-4281-b951-d872f2087c98
parent e516a3c1
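
To make the intent of the change concrete, here is a minimal, self-contained sketch of the pattern it introduces: the pipeline hands the video renderer the demuxer stream plus an ordered list of candidate decoders, and the renderer tries each decoder in turn, moving on to the next one only when a decoder reports it cannot handle the stream. The types below are simplified stand-ins, not the Chromium media classes, and the synchronous loop only approximates the asynchronous InitializeNextDecoder()/OnDecoderInitDone() chain in the actual patch.

// Sketch only: simplified stand-in types, not the Chromium media classes.
#include <functional>
#include <iostream>
#include <list>
#include <memory>

enum Status { OK, DECODER_ERROR_NOT_SUPPORTED, ERROR_DECODE };

struct Decoder {
  std::function<Status()> initialize;  // Reports whether this decoder can handle the stream.
};

typedef std::list<std::shared_ptr<Decoder> > DecoderList;

// Synchronous approximation of InitializeNextDecoder()/OnDecoderInitDone():
// pop the front of the list, try it, and fall through to the next candidate
// only on DECODER_ERROR_NOT_SUPPORTED.
Status InitializeFirstWorkingDecoder(DecoderList decoders) {
  while (!decoders.empty()) {
    std::shared_ptr<Decoder> decoder = decoders.front();
    decoders.pop_front();
    Status status = decoder->initialize();
    if (status == OK)
      return OK;
    if (decoders.empty() || status != DECODER_ERROR_NOT_SUPPORTED)
      return status;  // Hard error, or nothing left to try.
  }
  return DECODER_ERROR_NOT_SUPPORTED;
}

int main() {
  DecoderList decoders;
  decoders.push_back(std::make_shared<Decoder>(
      Decoder{[]() { return DECODER_ERROR_NOT_SUPPORTED; }}));  // e.g. a hardware decoder
  decoders.push_back(std::make_shared<Decoder>(
      Decoder{[]() { return OK; }}));                           // e.g. a software fallback
  std::cout << "result: " << InitializeFirstWorkingDecoder(decoders) << "\n";
  return 0;
}
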
......@@ -2434,7 +2434,7 @@ WebMediaPlayer* RenderViewImpl::createMediaPlayer(
GpuChannelHost* gpu_channel_host =
RenderThreadImpl::current()->EstablishGpuChannelSync(
content::CAUSE_FOR_GPU_LAUNCH_VIDEODECODEACCELERATOR_INITIALIZE);
collection->AddVideoDecoder(new media::GpuVideoDecoder(
collection->GetVideoDecoders()->push_back(new media::GpuVideoDecoder(
message_loop_factory->GetMessageLoop("GpuVideoDecoder"),
factories_loop,
new RendererGpuVideoDecoderFactories(
......
......@@ -29,10 +29,6 @@ void FilterCollection::AddAudioDecoder(AudioDecoder* audio_decoder) {
audio_decoders_.push_back(audio_decoder);
}
void FilterCollection::AddVideoDecoder(VideoDecoder* video_decoder) {
video_decoders_.push_back(video_decoder);
}
void FilterCollection::AddAudioRenderer(AudioRenderer* audio_renderer) {
audio_renderers_.push_back(audio_renderer);
}
......@@ -41,13 +37,6 @@ void FilterCollection::AddVideoRenderer(VideoRenderer* video_renderer) {
video_renderers_.push_back(video_renderer);
}
bool FilterCollection::IsEmpty() const {
return audio_decoders_.empty() &&
video_decoders_.empty() &&
audio_renderers_.empty() &&
video_renderers_.empty();
}
void FilterCollection::Clear() {
audio_decoders_.clear();
video_decoders_.clear();
......@@ -64,15 +53,6 @@ void FilterCollection::SelectAudioDecoder(scoped_refptr<AudioDecoder>* out) {
audio_decoders_.pop_front();
}
void FilterCollection::SelectVideoDecoder(scoped_refptr<VideoDecoder>* out) {
if (video_decoders_.empty()) {
*out = NULL;
return;
}
*out = video_decoders_.front();
video_decoders_.pop_front();
}
void FilterCollection::SelectAudioRenderer(scoped_refptr<AudioRenderer>* out) {
if (audio_renderers_.empty()) {
*out = NULL;
......@@ -91,4 +71,9 @@ void FilterCollection::SelectVideoRenderer(scoped_refptr<VideoRenderer>* out) {
video_renderers_.pop_front();
}
FilterCollection::VideoDecoderList*
FilterCollection::GetVideoDecoders() {
return &video_decoders_;
}
} // namespace media
......@@ -25,6 +25,8 @@ class VideoRenderer;
// http://crbug.com/110800
class MEDIA_EXPORT FilterCollection {
public:
typedef std::list<scoped_refptr<VideoDecoder> > VideoDecoderList;
FilterCollection();
~FilterCollection();
......@@ -34,13 +36,9 @@ class MEDIA_EXPORT FilterCollection {
// Adds a filter to the collection.
void AddAudioDecoder(AudioDecoder* audio_decoder);
void AddVideoDecoder(VideoDecoder* video_decoder);
void AddAudioRenderer(AudioRenderer* audio_renderer);
void AddVideoRenderer(VideoRenderer* video_renderer);
// Is the collection empty?
bool IsEmpty() const;
// Remove remaining filters.
void Clear();
......@@ -49,14 +47,15 @@ class MEDIA_EXPORT FilterCollection {
// If a filter is returned it is removed from the collection.
// Filters are selected in FIFO order.
void SelectAudioDecoder(scoped_refptr<AudioDecoder>* out);
void SelectVideoDecoder(scoped_refptr<VideoDecoder>* out);
void SelectAudioRenderer(scoped_refptr<AudioRenderer>* out);
void SelectVideoRenderer(scoped_refptr<VideoRenderer>* out);
VideoDecoderList* GetVideoDecoders();
private:
scoped_refptr<Demuxer> demuxer_;
std::list<scoped_refptr<AudioDecoder> > audio_decoders_;
std::list<scoped_refptr<VideoDecoder> > video_decoders_;
VideoDecoderList video_decoders_;
std::list<scoped_refptr<AudioRenderer> > audio_renderers_;
std::list<scoped_refptr<VideoRenderer> > video_renderers_;
......
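
For context on the header change above: with AddVideoDecoder()/SelectVideoDecoder() and the video-decoder half of IsEmpty() removed, callers populate the decoder list directly through GetVideoDecoders(), in priority order, and the renderer consumes the whole list at Initialize() time. A hedged, standalone sketch of that surface, using std::list and std::shared_ptr as stand-ins for the Chromium list and scoped_refptr, with illustrative decoder names:

// Sketch only: stand-in types; decoder names are illustrative.
#include <iostream>
#include <list>
#include <memory>
#include <string>

struct VideoDecoder {
  std::string name;
};

class FilterCollection {
 public:
  typedef std::list<std::shared_ptr<VideoDecoder> > VideoDecoderList;

  // Mirrors FilterCollection::GetVideoDecoders(): the list itself is exposed
  // instead of Add/Select accessors.
  VideoDecoderList* GetVideoDecoders() { return &video_decoders_; }

 private:
  VideoDecoderList video_decoders_;
};

int main() {
  FilterCollection collection;
  // The front of the list is tried first by the renderer.
  collection.GetVideoDecoders()->push_back(
      std::make_shared<VideoDecoder>(VideoDecoder{"hardware decoder"}));
  collection.GetVideoDecoders()->push_back(
      std::make_shared<VideoDecoder>(VideoDecoder{"software fallback"}));

  for (const std::shared_ptr<VideoDecoder>& decoder :
       *collection.GetVideoDecoders()) {
    std::cout << decoder->name << "\n";
  }
  return 0;
}
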
......@@ -20,54 +20,22 @@ class FilterCollectionTest : public ::testing::Test {
DISALLOW_COPY_AND_ASSIGN(FilterCollectionTest);
};
TEST_F(FilterCollectionTest, TestIsEmptyAndClear) {
EXPECT_TRUE(collection_.IsEmpty());
collection_.AddAudioDecoder(mock_filters_.audio_decoder());
EXPECT_FALSE(collection_.IsEmpty());
collection_.Clear();
EXPECT_TRUE(collection_.IsEmpty());
}
TEST_F(FilterCollectionTest, SelectXXXMethods) {
scoped_refptr<AudioDecoder> audio_decoder;
scoped_refptr<VideoDecoder> video_decoder;
collection_.AddVideoDecoder(mock_filters_.video_decoder());
EXPECT_FALSE(collection_.IsEmpty());
// Verify that the video decoder will not be returned if we
// ask for a different type.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_FALSE(audio_decoder);
EXPECT_FALSE(collection_.IsEmpty());
// Verify that we can actually retrieve the video decoder
// and that it is removed from the collection.
collection_.SelectVideoDecoder(&video_decoder);
EXPECT_TRUE(video_decoder);
EXPECT_TRUE(collection_.IsEmpty());
// Add a video decoder and audio decoder.
collection_.AddVideoDecoder(mock_filters_.video_decoder());
// Add an audio decoder.
collection_.AddAudioDecoder(mock_filters_.audio_decoder());
// Verify that we can select the audio decoder.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
EXPECT_FALSE(collection_.IsEmpty());
// Verify that we can't select it again since only one has been added.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_FALSE(audio_decoder);
// Verify that we can select the video decoder and that doing so will
// empty the collection again.
collection_.SelectVideoDecoder(&video_decoder);
EXPECT_TRUE(collection_.IsEmpty());
}
TEST_F(FilterCollectionTest, MultipleFiltersOfSameType) {
......@@ -83,13 +51,11 @@ TEST_F(FilterCollectionTest, MultipleFiltersOfSameType) {
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
EXPECT_EQ(audio_decoder, audio_decoder_a);
EXPECT_FALSE(collection_.IsEmpty());
// Verify that second SelectAudioDecoder() returns audio_decoder_b.
collection_.SelectAudioDecoder(&audio_decoder);
EXPECT_TRUE(audio_decoder);
EXPECT_EQ(audio_decoder, audio_decoder_b);
EXPECT_TRUE(collection_.IsEmpty());
// Verify that third SelectAudioDecoder() returns nothing.
collection_.SelectAudioDecoder(&audio_decoder);
......
......@@ -69,8 +69,6 @@ const char* MediaLog::PipelineStateToString(Pipeline::State state) {
return "initAudioDecoder";
case Pipeline::kInitAudioRenderer:
return "initAudioRenderer";
case Pipeline::kInitVideoDecoder:
return "initVideoDecoder";
case Pipeline::kInitVideoRenderer:
return "initVideoRenderer";
case Pipeline::kPausing:
......
......@@ -77,7 +77,7 @@ MockFilterCollection::~MockFilterCollection() {}
scoped_ptr<FilterCollection> MockFilterCollection::Create() {
scoped_ptr<FilterCollection> collection(new FilterCollection());
collection->SetDemuxer(demuxer_);
collection->AddVideoDecoder(video_decoder_);
collection->GetVideoDecoders()->push_back(video_decoder_);
collection->AddAudioDecoder(audio_decoder_);
collection->AddVideoRenderer(video_renderer_);
collection->AddAudioRenderer(audio_renderer_);
......
......@@ -137,21 +137,23 @@ class MockVideoRenderer : public VideoRenderer {
MockVideoRenderer();
// VideoRenderer implementation.
MOCK_METHOD9(Initialize, void(const scoped_refptr<VideoDecoder>& decoder,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& time_cb,
const NaturalSizeChangedCB& size_changed_cb,
const base::Closure& ended_cb,
const PipelineStatusCB& error_cb,
const TimeDeltaCB& get_time_cb,
const TimeDeltaCB& get_duration_cb));
MOCK_METHOD10(Initialize, void(const scoped_refptr<DemuxerStream>& stream,
const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& time_cb,
const NaturalSizeChangedCB& size_changed_cb,
const base::Closure& ended_cb,
const PipelineStatusCB& error_cb,
const TimeDeltaCB& get_time_cb,
const TimeDeltaCB& get_duration_cb));
MOCK_METHOD1(Play, void(const base::Closure& callback));
MOCK_METHOD1(Pause, void(const base::Closure& callback));
MOCK_METHOD1(Flush, void(const base::Closure& callback));
MOCK_METHOD2(Preroll, void(base::TimeDelta time, const PipelineStatusCB& cb));
MOCK_METHOD1(Stop, void(const base::Closure& callback));
MOCK_METHOD1(SetPlaybackRate, void(float playback_rate));
MOCK_METHOD0(PrepareForShutdownHack, void());
protected:
virtual ~MockVideoRenderer();
......
......@@ -63,7 +63,6 @@ media::PipelineStatus PipelineStatusNotification::status() {
struct Pipeline::PipelineInitState {
scoped_refptr<AudioDecoder> audio_decoder;
scoped_refptr<VideoDecoder> video_decoder;
};
Pipeline::Pipeline(MessageLoop* message_loop, MediaLog* media_log)
......@@ -584,14 +583,8 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
DCHECK(message_loop_->BelongsToCurrentThread());
if (last_stage_status != PIPELINE_OK) {
// Currently only VideoDecoders have a recoverable error code.
if (state_ == kInitVideoDecoder &&
last_stage_status == DECODER_ERROR_NOT_SUPPORTED) {
state_ = kInitAudioRenderer;
} else {
SetError(last_stage_status);
return;
}
SetError(last_stage_status);
return;
}
// If we have received the stop or error signal, return immediately.
......@@ -601,7 +594,6 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
DCHECK(state_ == kInitDemuxer ||
state_ == kInitAudioDecoder ||
state_ == kInitAudioRenderer ||
state_ == kInitVideoDecoder ||
state_ == kInitVideoRenderer);
// Demuxer created, create audio decoder.
......@@ -624,18 +616,10 @@ void Pipeline::InitializeTask(PipelineStatus last_stage_status) {
}
}
// Assuming audio renderer was created, create video decoder.
// Assuming audio renderer was created, create video renderer.
if (state_ == kInitAudioRenderer) {
// Then perform the stage of initialization, i.e. initialize video decoder.
SetState(kInitVideoDecoder);
if (InitializeVideoDecoder(demuxer_))
return;
}
// Assuming video decoder was created, create video renderer.
if (state_ == kInitVideoDecoder) {
SetState(kInitVideoRenderer);
if (InitializeVideoRenderer(pipeline_init_state_->video_decoder)) {
if (InitializeVideoRenderer(demuxer_->GetStream(DemuxerStream::VIDEO))) {
base::AutoLock auto_lock(lock_);
has_video_ = true;
return;
......@@ -683,10 +667,8 @@ void Pipeline::StopTask(const base::Closure& stop_cb) {
return;
}
if (video_decoder_) {
video_decoder_->PrepareForShutdownHack();
video_decoder_ = NULL;
}
if (video_renderer_)
video_renderer_->PrepareForShutdownHack();
if (tearing_down_ && status_ != PIPELINE_OK) {
// If we are stopping due to SetError(), stop normally instead of
......@@ -970,7 +952,6 @@ void Pipeline::TeardownStateTransitionTask() {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
case kInitVideoDecoder:
case kInitVideoRenderer:
case kSeeking:
case kStarting:
......@@ -1064,34 +1045,6 @@ bool Pipeline::InitializeAudioDecoder(
return true;
}
bool Pipeline::InitializeVideoDecoder(
const scoped_refptr<Demuxer>& demuxer) {
DCHECK(message_loop_->BelongsToCurrentThread());
DCHECK(IsPipelineOk());
DCHECK(demuxer);
scoped_refptr<DemuxerStream> stream =
demuxer->GetStream(DemuxerStream::VIDEO);
if (!stream)
return false;
filter_collection_->SelectVideoDecoder(&pipeline_init_state_->video_decoder);
if (!pipeline_init_state_->video_decoder) {
SetError(PIPELINE_ERROR_REQUIRED_FILTER_MISSING);
return false;
}
pipeline_init_state_->video_decoder->Initialize(
stream,
base::Bind(&Pipeline::OnFilterInitialize, this),
base::Bind(&Pipeline::OnUpdateStatistics, this));
video_decoder_ = pipeline_init_state_->video_decoder;
return true;
}
bool Pipeline::InitializeAudioRenderer(
const scoped_refptr<AudioDecoder>& decoder) {
DCHECK(message_loop_->BelongsToCurrentThread());
......@@ -1118,11 +1071,11 @@ bool Pipeline::InitializeAudioRenderer(
}
bool Pipeline::InitializeVideoRenderer(
const scoped_refptr<VideoDecoder>& decoder) {
const scoped_refptr<DemuxerStream>& stream) {
DCHECK(message_loop_->BelongsToCurrentThread());
DCHECK(IsPipelineOk());
if (!decoder)
if (!stream)
return false;
filter_collection_->SelectVideoRenderer(&video_renderer_);
......@@ -1132,7 +1085,8 @@ bool Pipeline::InitializeVideoRenderer(
}
video_renderer_->Initialize(
decoder,
stream,
*filter_collection_->GetVideoDecoders(),
base::Bind(&Pipeline::OnFilterInitialize, this),
base::Bind(&Pipeline::OnUpdateStatistics, this),
base::Bind(&Pipeline::OnVideoTimeUpdate, this),
......@@ -1141,6 +1095,7 @@ bool Pipeline::InitializeVideoRenderer(
base::Bind(&Pipeline::SetError, this),
base::Bind(&Pipeline::GetMediaTime, this),
base::Bind(&Pipeline::GetMediaDuration, this));
filter_collection_->GetVideoDecoders()->clear();
return true;
}
......@@ -1174,7 +1129,6 @@ void Pipeline::TearDownPipeline() {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
case kInitVideoDecoder:
case kInitVideoRenderer:
// Make it look like initialization was successful.
filter_collection_.reset();
......
......@@ -31,7 +31,6 @@ class AudioDecoder;
class Clock;
class FilterCollection;
class MediaLog;
class VideoDecoder;
class VideoRenderer;
// Adapter for using asynchronous Pipeline methods in code that wants to run
......@@ -214,7 +213,6 @@ class MEDIA_EXPORT Pipeline
kInitDemuxer,
kInitAudioDecoder,
kInitAudioRenderer,
kInitVideoDecoder,
kInitVideoRenderer,
kPausing,
kSeeking,
......@@ -352,16 +350,16 @@ class MEDIA_EXPORT Pipeline
// Returns true if the asynchronous action of creating decoder has started.
// Returns false if this method did nothing because the corresponding
// audio/video stream does not exist.
// audio stream does not exist.
bool InitializeAudioDecoder(const scoped_refptr<Demuxer>& demuxer);
bool InitializeVideoDecoder(const scoped_refptr<Demuxer>& demuxer);
// Initializes a renderer and connects it with decoder. Returns true if the
// asynchronous action of creating renderer has started. Returns
// false if this method did nothing because the corresponding audio/video
// stream does not exist.
bool InitializeAudioRenderer(const scoped_refptr<AudioDecoder>& decoder);
bool InitializeVideoRenderer(const scoped_refptr<VideoDecoder>& decoder);
bool InitializeVideoRenderer(
const scoped_refptr<DemuxerStream>& stream);
// Kicks off destroying filters. Called by StopTask() and ErrorChangedTask().
// When we start to tear down the pipeline, we will consider two cases:
......@@ -496,17 +494,16 @@ class MEDIA_EXPORT Pipeline
PipelineStatusCB ended_cb_;
PipelineStatusCB error_cb_;
// Decoder reference used for signalling imminent shutdown.
// This is a HACK necessary because WebMediaPlayerImpl::Destroy() holds the
// renderer thread loop hostage for until PipelineImpl::Stop() calls its
// callback.
// This reference should only be used for this hack and no other purposes.
// http://crbug.com/110228 tracks removing this hack.
scoped_refptr<VideoDecoder> video_decoder_;
// Renderer references used for setting the volume and determining
// Audio renderer reference used for setting the volume and determining
// when playback has finished.
scoped_refptr<AudioRenderer> audio_renderer_;
// Video Renderer reference used for determining when playback has finished
// and for signalling imminent shutdown.
// The signalling imminent shutdown is a HACK necessary because
// WebMediaPlayerImpl::Destroy() holds the render thread loop hostage
// until PipelineImpl::Stop() calls its callback.
// http://crbug.com/110228 tracks removing this hack.
scoped_refptr<VideoRenderer> video_renderer_;
// Demuxer reference used for setting the preload value.
......
......@@ -54,10 +54,18 @@ ACTION(RunPipelineStatusCB) {
arg1.Run(PIPELINE_OK);
}
ACTION(RunPipelineStatusCB2) {
arg2.Run(PIPELINE_OK);
}
ACTION_P(RunPipelineStatusCBWithStatus, status) {
arg1.Run(status);
}
ACTION_P(RunPipelineStatusCB2WithStatus, status) {
arg2.Run(status);
}
// Used for setting expectations on pipeline callbacks. Using a StrictMock
// also lets us test for missing callbacks.
class CallbackHelper {
......@@ -164,13 +172,6 @@ class PipelineTest : public ::testing::Test {
return stream;
}
// Sets up expectations to allow the video decoder to initialize.
void InitializeVideoDecoder(const scoped_refptr<DemuxerStream>& stream) {
EXPECT_CALL(*mocks_->video_decoder(),
Initialize(stream, _, _))
.WillOnce(RunPipelineStatusCB());
}
// Sets up expectations to allow the audio decoder to initialize.
void InitializeAudioDecoder(const scoped_refptr<DemuxerStream>& stream) {
EXPECT_CALL(*mocks_->audio_decoder(), Initialize(stream, _, _))
......@@ -178,11 +179,10 @@ class PipelineTest : public ::testing::Test {
}
// Sets up expectations to allow the video renderer to initialize.
void InitializeVideoRenderer() {
void InitializeVideoRenderer(const scoped_refptr<DemuxerStream>& stream) {
EXPECT_CALL(*mocks_->video_renderer(), Initialize(
scoped_refptr<VideoDecoder>(mocks_->video_decoder()),
_, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB());
stream, _, _, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB2());
EXPECT_CALL(*mocks_->video_renderer(), SetPlaybackRate(0.0f));
// Startup sequence.
......@@ -419,8 +419,7 @@ TEST_F(PipelineTest, VideoStream) {
streams.push_back(video_stream());
InitializeDemuxer(&streams);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
......@@ -437,8 +436,7 @@ TEST_F(PipelineTest, AudioVideoStream) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_TRUE(pipeline_->HasAudio());
......@@ -455,8 +453,7 @@ TEST_F(PipelineTest, Seek) {
InitializeDemuxer(&streams, base::TimeDelta::FromSeconds(3000));
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
// Initialize then seek!
InitializePipeline(PIPELINE_OK);
......@@ -492,8 +489,7 @@ TEST_F(PipelineTest, Properties) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_EQ(kDuration.ToInternalValue(),
......@@ -509,8 +505,7 @@ TEST_F(PipelineTest, GetBufferedTimeRanges) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
......@@ -564,8 +559,7 @@ TEST_F(PipelineTest, DisableAudioRenderer) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_TRUE(pipeline_->HasAudio());
......@@ -589,8 +583,7 @@ TEST_F(PipelineTest, DisableAudioRendererDuringInit) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer(true);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
EXPECT_CALL(*mocks_->demuxer(),
OnAudioRendererDisabled());
......@@ -614,8 +607,7 @@ TEST_F(PipelineTest, EndedCallback) {
InitializeDemuxer(&streams);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
// The ended callback shouldn't run until both renderers have ended.
......@@ -649,8 +641,7 @@ TEST_F(PipelineTest, AudioStreamShorterThanVideo) {
InitializeDemuxer(&streams, duration);
InitializeAudioDecoder(audio_stream());
InitializeAudioRenderer();
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_EQ(0, pipeline_->GetMediaTime().ToInternalValue());
......@@ -786,8 +777,7 @@ TEST_F(PipelineTest, StartTimeIsZero) {
const base::TimeDelta kDuration = base::TimeDelta::FromSeconds(100);
InitializeDemuxer(&streams, kDuration);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
......@@ -808,8 +798,7 @@ TEST_F(PipelineTest, StartTimeIsNonZero) {
streams.push_back(video_stream());
InitializeDemuxer(&streams, kDuration);
InitializeVideoDecoder(video_stream());
InitializeVideoRenderer();
InitializeVideoRenderer(video_stream());
InitializePipeline(PIPELINE_OK);
EXPECT_FALSE(pipeline_->HasAudio());
......@@ -937,7 +926,6 @@ class PipelineTeardownTest : public PipelineTest {
kInitDemuxer,
kInitAudioDecoder,
kInitAudioRenderer,
kInitVideoDecoder,
kInitVideoRenderer,
kPausing,
kFlushing,
......@@ -960,7 +948,6 @@ class PipelineTeardownTest : public PipelineTest {
case kInitDemuxer:
case kInitAudioDecoder:
case kInitAudioRenderer:
case kInitVideoDecoder:
case kInitVideoRenderer:
DoInitialize(state, stop_or_error);
break;
......@@ -1080,36 +1067,17 @@ class PipelineTeardownTest : public PipelineTest {
EXPECT_CALL(*mocks_->audio_renderer(), Initialize(_, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB());
if (state == kInitVideoDecoder) {
if (stop_or_error == kStop) {
EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
.WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB()));
EXPECT_CALL(callbacks_, OnStop());
} else {
status = PIPELINE_ERROR_DECODE;
EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
.WillOnce(RunPipelineStatusCBWithStatus(status));
}
EXPECT_CALL(*mocks_->demuxer(), Stop(_)).WillOnce(RunClosure());
EXPECT_CALL(*mocks_->audio_renderer(), Stop(_)).WillOnce(RunClosure());
return status;
}
EXPECT_CALL(*mocks_->video_decoder(), Initialize(_, _, _))
.WillOnce(RunPipelineStatusCB());
if (state == kInitVideoRenderer) {
if (stop_or_error == kStop) {
EXPECT_CALL(*mocks_->video_renderer(),
Initialize(_, _, _, _, _, _, _, _, _))
.WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB()));
Initialize(_, _, _, _, _, _, _, _, _, _))
.WillOnce(DoAll(Stop(pipeline_, stop_cb), RunPipelineStatusCB2()));
EXPECT_CALL(callbacks_, OnStop());
} else {
status = PIPELINE_ERROR_INITIALIZATION_FAILED;
EXPECT_CALL(*mocks_->video_renderer(),
Initialize(_, _, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCBWithStatus(status));
Initialize(_, _, _, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB2WithStatus(status));
}
EXPECT_CALL(*mocks_->demuxer(), Stop(_)).WillOnce(RunClosure());
......@@ -1119,8 +1087,8 @@ class PipelineTeardownTest : public PipelineTest {
}
EXPECT_CALL(*mocks_->video_renderer(),
Initialize(_, _, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB());
Initialize(_, _, _, _, _, _, _, _, _, _))
.WillOnce(RunPipelineStatusCB2());
// If we get here it's a successful initialization.
EXPECT_CALL(*mocks_->demuxer(), SetPlaybackRate(0.0f));
......@@ -1314,7 +1282,6 @@ class PipelineTeardownTest : public PipelineTest {
INSTANTIATE_TEARDOWN_TEST(Stop, InitDemuxer);
INSTANTIATE_TEARDOWN_TEST(Stop, InitAudioDecoder);
INSTANTIATE_TEARDOWN_TEST(Stop, InitAudioRenderer);
INSTANTIATE_TEARDOWN_TEST(Stop, InitVideoDecoder);
INSTANTIATE_TEARDOWN_TEST(Stop, InitVideoRenderer);
INSTANTIATE_TEARDOWN_TEST(Stop, Pausing);
INSTANTIATE_TEARDOWN_TEST(Stop, Flushing);
......@@ -1326,7 +1293,6 @@ INSTANTIATE_TEARDOWN_TEST(Stop, Playing);
INSTANTIATE_TEARDOWN_TEST(Error, InitDemuxer);
INSTANTIATE_TEARDOWN_TEST(Error, InitAudioDecoder);
INSTANTIATE_TEARDOWN_TEST(Error, InitAudioRenderer);
INSTANTIATE_TEARDOWN_TEST(Error, InitVideoDecoder);
INSTANTIATE_TEARDOWN_TEST(Error, InitVideoRenderer);
INSTANTIATE_TEARDOWN_TEST(Error, Pausing);
INSTANTIATE_TEARDOWN_TEST(Error, Flushing);
......
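
The RunPipelineStatusCB2 / RunPipelineStatusCB2WithStatus actions added above exist because VideoRenderer::Initialize() gained two leading parameters (the demuxer stream and the decoder list), so the PipelineStatusCB moves from the second argument (arg1) to the third (arg2). A minimal standalone illustration of that gmock idiom, assuming GoogleTest/GoogleMock are available and using simplified types rather than the Chromium mocks:

// Sketch only: assumes GoogleTest/GoogleMock; simplified types, not the
// Chromium mocks. Link against gmock_main (or provide a main()).
#include <functional>
#include "gmock/gmock.h"
#include "gtest/gtest.h"

enum Status { PIPELINE_OK, PIPELINE_ERROR_DECODE };
typedef std::function<void(Status)> StatusCB;

struct Renderer {
  virtual ~Renderer() {}
  // The status callback is the third parameter, mirroring the new
  // ten-argument Initialize() where init_cb follows |stream| and |decoders|.
  virtual void Initialize(int stream, int decoders, const StatusCB& init_cb) = 0;
};

struct MockRenderer : Renderer {
  MOCK_METHOD3(Initialize, void(int, int, const StatusCB&));
};

// Runs the callback found at argument index 2, the same shape as
// RunPipelineStatusCB2() in the test above.
ACTION(RunStatusCB2) { arg2(PIPELINE_OK); }

TEST(RendererInitTest, ActionCompletesInit) {
  MockRenderer renderer;
  Status result = PIPELINE_ERROR_DECODE;
  EXPECT_CALL(renderer, Initialize(testing::_, testing::_, testing::_))
      .WillOnce(RunStatusCB2());
  renderer.Initialize(0, 0, [&result](Status s) { result = s; });
  EXPECT_EQ(PIPELINE_OK, result);
}
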
......@@ -5,6 +5,8 @@
#ifndef MEDIA_BASE_VIDEO_RENDERER_H_
#define MEDIA_BASE_VIDEO_RENDERER_H_
#include <list>
#include "base/callback.h"
#include "base/memory/ref_counted.h"
#include "base/time.h"
......@@ -17,11 +19,14 @@ class Size;
namespace media {
class DemuxerStream;
class VideoDecoder;
class MEDIA_EXPORT VideoRenderer
: public base::RefCountedThreadSafe<VideoRenderer> {
public:
typedef std::list<scoped_refptr<VideoDecoder> > VideoDecoderList;
// Used to update the pipeline's clock time. The parameter is the time that
// the clock should not exceed.
typedef base::Callback<void(base::TimeDelta)> TimeCB;
......@@ -50,7 +55,8 @@ class MEDIA_EXPORT VideoRenderer
// |get_time_cb| is used to query the current media playback time.
//
// |get_duration_cb| is used to query the media duration.
virtual void Initialize(const scoped_refptr<VideoDecoder>& decoder,
virtual void Initialize(const scoped_refptr<DemuxerStream>& stream,
const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& time_cb,
......@@ -85,6 +91,13 @@ class MEDIA_EXPORT VideoRenderer
// Updates the current playback rate.
virtual void SetPlaybackRate(float playback_rate) = 0;
// Prepare decoder for shutdown. This is a HACK needed because
// PipelineImpl::Stop() goes through a Pause/Flush/Stop dance to all its
// filters, waiting for each state transition to complete before starting the
// next, but WebMediaPlayerImpl::Destroy() holds the renderer loop hostage for
// the duration. http://crbug.com/110228 tracks removing this.
virtual void PrepareForShutdownHack() = 0;
protected:
friend class base::RefCountedThreadSafe<VideoRenderer>;
......
......@@ -197,7 +197,8 @@ PipelineIntegrationTestBase::CreateFilterCollection(
base::Unretained(message_loop_factory_.get()),
"VideoDecoderThread"),
decryptor);
collection->AddVideoDecoder(decoder);
collection->GetVideoDecoders()->push_back(decoder);
// Disable frame dropping if hashing is enabled.
renderer_ = new VideoRendererBase(
base::Bind(&PipelineIntegrationTestBase::OnVideoRendererPaint,
......
......@@ -112,7 +112,8 @@ void VideoRendererBase::Preroll(base::TimeDelta time,
AttemptRead_Locked();
}
void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
void VideoRendererBase::Initialize(const scoped_refptr<DemuxerStream>& stream,
const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& max_time_cb,
......@@ -122,7 +123,9 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
const TimeDeltaCB& get_time_cb,
const TimeDeltaCB& get_duration_cb) {
base::AutoLock auto_lock(lock_);
DCHECK(decoder);
DCHECK(stream);
DCHECK(!decoders.empty());
DCHECK_EQ(stream->type(), DemuxerStream::VIDEO);
DCHECK(!init_cb.is_null());
DCHECK(!statistics_cb.is_null());
DCHECK(!max_time_cb.is_null());
......@@ -131,8 +134,8 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
DCHECK(!get_time_cb.is_null());
DCHECK(!get_duration_cb.is_null());
DCHECK_EQ(kUninitialized, state_);
decoder_ = decoder;
init_cb_ = init_cb;
statistics_cb_ = statistics_cb;
max_time_cb_ = max_time_cb;
size_changed_cb_ = size_changed_cb;
......@@ -141,20 +144,65 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
get_time_cb_ = get_time_cb;
get_duration_cb_ = get_duration_cb;
scoped_ptr<VideoDecoderList> decoder_list(new VideoDecoderList(decoders));
InitializeNextDecoder(stream, decoder_list.Pass());
}
void VideoRendererBase::InitializeNextDecoder(
const scoped_refptr<DemuxerStream>& demuxer_stream,
scoped_ptr<VideoDecoderList> decoders) {
lock_.AssertAcquired();
DCHECK(!decoders->empty());
scoped_refptr<VideoDecoder> decoder = decoders->front();
decoders->pop_front();
DCHECK(decoder);
decoder_ = decoder;
base::AutoUnlock auto_unlock(lock_);
decoder->Initialize(
demuxer_stream,
base::Bind(&VideoRendererBase::OnDecoderInitDone, this,
demuxer_stream,
base::Passed(&decoders)),
statistics_cb_);
}
void VideoRendererBase::OnDecoderInitDone(
const scoped_refptr<DemuxerStream>& demuxer_stream,
scoped_ptr<VideoDecoderList> decoders,
PipelineStatus status) {
base::AutoLock auto_lock(lock_);
if (state_ == kStopped)
return;
if (!decoders->empty() && status == DECODER_ERROR_NOT_SUPPORTED) {
InitializeNextDecoder(demuxer_stream, decoders.Pass());
return;
}
if (status != PIPELINE_OK) {
state_ = kError;
base::ResetAndReturn(&init_cb_).Run(status);
return;
}
// We're all good! Consider ourselves flushed. (ThreadMain() should never
// see us in the kUninitialized state).
// Since we had an initial Preroll(), we consider ourself flushed, because we
// have not populated any buffers yet.
state_ = kFlushed;
set_opaque_cb_.Run(!decoder->HasAlpha());
set_opaque_cb_.Run(!decoder_->HasAlpha());
set_opaque_cb_.Reset();
// Create our video thread.
if (!base::PlatformThread::Create(0, this, &thread_)) {
NOTREACHED() << "Video thread creation failed";
state_ = kError;
init_cb.Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
base::ResetAndReturn(&init_cb_).Run(PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
......@@ -163,7 +211,13 @@ void VideoRendererBase::Initialize(const scoped_refptr<VideoDecoder>& decoder,
// TODO(scherkus): find out if this is necessary, but it seems to help.
::SetThreadPriority(thread_, THREAD_PRIORITY_ABOVE_NORMAL);
#endif // defined(OS_WIN)
init_cb.Run(PIPELINE_OK);
base::ResetAndReturn(&init_cb_).Run(PIPELINE_OK);
}
void VideoRendererBase::PrepareForShutdownHack() {
base::AutoLock auto_lock(lock_);
if (decoder_)
decoder_->PrepareForShutdownHack();
}
// PlatformThread::Delegate implementation.
......
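
One detail of the new VideoRendererBase path worth noting: InitializeNextDecoder() asserts |lock_| is held and then drops it (base::AutoUnlock) around decoder->Initialize(), presumably so that a decoder whose initialization completes synchronously can call back into OnDecoderInitDone(), which takes the same non-reentrant lock, without deadlocking. A minimal sketch of that unlock-around-the-callout pattern, with std::mutex standing in for base::Lock and illustrative names rather than Chromium code:

// Sketch only: std::mutex stands in for base::Lock; names are illustrative.
#include <functional>
#include <iostream>
#include <mutex>

class Renderer {
 public:
  // Mirrors the shape of InitializeNextDecoder(): the renderer's lock is
  // released just for the external call, because that call may complete
  // synchronously and re-enter OnDecoderInitDone().
  void Initialize(const std::function<void(Renderer*)>& initialize_decoder) {
    std::unique_lock<std::mutex> lock(lock_);
    // ... pick the decoder to try while still holding the lock ...
    lock.unlock();             // Equivalent of base::AutoUnlock.
    initialize_decoder(this);  // May call OnDecoderInitDone() re-entrantly.
    lock.lock();
  }

  void OnDecoderInitDone() {
    std::lock_guard<std::mutex> guard(lock_);  // Safe: Initialize() released it.
    state_ = kFlushed;
  }

 private:
  enum State { kUninitialized, kFlushed };
  std::mutex lock_;
  State state_ = kUninitialized;
};

int main() {
  Renderer renderer;
  renderer.Initialize([](Renderer* r) { r->OnDecoderInitDone(); });
  std::cout << "initialized without deadlocking\n";
  return 0;
}
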
......@@ -11,6 +11,7 @@
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/threading/platform_thread.h"
#include "media/base/demuxer_stream.h"
#include "media/base/pipeline_status.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
......@@ -50,7 +51,8 @@ class MEDIA_EXPORT VideoRendererBase
bool drop_frames);
// VideoRenderer implementation.
virtual void Initialize(const scoped_refptr<VideoDecoder>& decoder,
virtual void Initialize(const scoped_refptr<DemuxerStream>& stream,
const VideoDecoderList& decoders,
const PipelineStatusCB& init_cb,
const StatisticsCB& statistics_cb,
const TimeCB& max_time_cb,
......@@ -66,6 +68,7 @@ class MEDIA_EXPORT VideoRendererBase
const PipelineStatusCB& cb) OVERRIDE;
virtual void Stop(const base::Closure& callback) OVERRIDE;
virtual void SetPlaybackRate(float playback_rate) OVERRIDE;
virtual void PrepareForShutdownHack() OVERRIDE;
// PlatformThread::Delegate implementation.
virtual void ThreadMain() OVERRIDE;
......@@ -119,6 +122,18 @@ class MEDIA_EXPORT VideoRendererBase
// |size_changed_cb_| if the natural size changes.
void SetCurrentFrameToNextReadyFrame();
// Pops the front of |decoders|, assigns it to |decoder_| and then
// calls initialize on the new decoder.
void InitializeNextDecoder(const scoped_refptr<DemuxerStream>& demuxer_stream,
scoped_ptr<VideoDecoderList> decoders);
// Called when |decoder_| initialization completes.
// |demuxer_stream| & |decoders| are used if initialization failed and
// InitializeNextDecoder() needs to be called again.
void OnDecoderInitDone(const scoped_refptr<DemuxerStream>& demuxer_stream,
scoped_ptr<VideoDecoderList> decoders,
PipelineStatus status);
// Used for accessing data members.
base::Lock lock_;
......@@ -210,6 +225,7 @@ class MEDIA_EXPORT VideoRendererBase
PipelineStatusCB preroll_cb_;
// Event callbacks.
PipelineStatusCB init_cb_;
StatisticsCB statistics_cb_;
TimeCB max_time_cb_;
NaturalSizeChangedCB size_changed_cb_;
......
......@@ -34,10 +34,15 @@ static const int kVideoDuration = kFrameDuration * 100;
static const int kEndOfStream = -1;
static const gfx::Size kNaturalSize(16u, 16u);
ACTION_P(RunPipelineStatusCB1, status) {
arg1.Run(status);
}
class VideoRendererBaseTest : public ::testing::Test {
public:
VideoRendererBaseTest()
: decoder_(new MockVideoDecoder()),
demuxer_stream_(new MockDemuxerStream()),
cv_(&lock_),
event_(false, false),
timeout_(TestTimeouts::action_timeout()),
......@@ -51,6 +56,9 @@ class VideoRendererBaseTest : public ::testing::Test {
base::Bind(&VideoRendererBaseTest::OnSetOpaque, base::Unretained(this)),
true);
EXPECT_CALL(*demuxer_stream_, type())
.WillRepeatedly(Return(DemuxerStream::VIDEO));
// We expect these to be called but we don't care how/when.
EXPECT_CALL(*decoder_, Stop(_))
.WillRepeatedly(RunClosure());
......@@ -98,13 +106,29 @@ class VideoRendererBaseTest : public ::testing::Test {
InSequence s;
EXPECT_CALL(*decoder_, Initialize(_, _, _))
.WillOnce(RunPipelineStatusCB1(PIPELINE_OK));
// Set playback rate before anything else happens.
renderer_->SetPlaybackRate(1.0f);
// Initialize, we shouldn't have any reads.
InitializeRenderer(PIPELINE_OK);
// We expect the video size to be set.
EXPECT_CALL(*this, OnNaturalSizeChanged(kNaturalSize));
// Start prerolling.
Preroll(0);
}
void InitializeRenderer(PipelineStatus expected_status) {
VideoRendererBase::VideoDecoderList decoders;
decoders.push_back(decoder_);
renderer_->Initialize(
decoder_,
NewExpectedStatusCB(PIPELINE_OK),
demuxer_stream_,
decoders,
NewExpectedStatusCB(expected_status),
base::Bind(&MockStatisticsCB::OnStatistics,
base::Unretained(&statistics_cb_object_)),
base::Bind(&VideoRendererBaseTest::OnTimeUpdate,
......@@ -116,12 +140,6 @@ class VideoRendererBaseTest : public ::testing::Test {
base::Bind(&VideoRendererBaseTest::GetTime, base::Unretained(this)),
base::Bind(&VideoRendererBaseTest::GetDuration,
base::Unretained(this)));
// We expect the video size to be set.
EXPECT_CALL(*this, OnNaturalSizeChanged(kNaturalSize));
// Start prerolling.
Preroll(0);
}
// Instead of immediately satisfying a decoder Read request, queue it up.
......@@ -311,6 +329,7 @@ class VideoRendererBaseTest : public ::testing::Test {
// Fixture members.
scoped_refptr<VideoRendererBase> renderer_;
scoped_refptr<MockVideoDecoder> decoder_;
scoped_refptr<MockDemuxerStream> demuxer_stream_;
MockStatisticsCB statistics_cb_object_;
// Receives all the buffers that renderer had provided to |decoder_|.
......@@ -671,4 +690,13 @@ TEST_F(VideoRendererBaseTest, AbortPendingRead_Preroll) {
Shutdown();
}
TEST_F(VideoRendererBaseTest, VideoDecoder_InitFailure) {
InSequence s;
EXPECT_CALL(*decoder_, Initialize(_, _, _))
.WillOnce(RunPipelineStatusCB1(PIPELINE_ERROR_DECODE));
InitializeRenderer(PIPELINE_ERROR_DECODE);
}
} // namespace media
......@@ -77,7 +77,7 @@ bool Movie::Open(const wchar_t* url, VideoRendererBase* video_renderer) {
base::Bind(&MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory_.get()),
"AudioDecoderThread")));
collection->AddVideoDecoder(new FFmpegVideoDecoder(
collection->GetVideoDecoders()->push_back(new FFmpegVideoDecoder(
base::Bind(&MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory_.get()),
"VideoDecoderThread"),
......
......@@ -119,7 +119,7 @@ bool InitPipeline(MessageLoop* message_loop,
base::Bind(&media::MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory),
"AudioDecoderThread")));
collection->AddVideoDecoder(new media::FFmpegVideoDecoder(
collection->GetVideoDecoders()->push_back(new media::FFmpegVideoDecoder(
base::Bind(&media::MessageLoopFactory::GetMessageLoop,
base::Unretained(message_loop_factory),
"VideoDecoderThread"),
......
......@@ -33,7 +33,7 @@ static void AddDefaultDecodersToCollection(
base::Unretained(message_loop_factory),
"VideoDecoderThread"),
decryptor);
filter_collection->AddVideoDecoder(ffmpeg_video_decoder);
filter_collection->GetVideoDecoders()->push_back(ffmpeg_video_decoder);
}
bool BuildMediaStreamCollection(const WebKit::WebURL& url,
......@@ -48,16 +48,13 @@ bool BuildMediaStreamCollection(const WebKit::WebURL& url,
if (!video_decoder)
return false;
// Remove all other decoders and just use the MediaStream one.
// Remove any "traditional" decoders (e.g. GpuVideoDecoder) from the
// collection.
// NOTE: http://crbug.com/110800 is about replacing this ad-hockery with
// something more designed.
scoped_refptr<media::VideoDecoder> old_videodecoder;
do {
filter_collection->SelectVideoDecoder(&old_videodecoder);
} while (old_videodecoder);
filter_collection->AddVideoDecoder(video_decoder);
filter_collection->GetVideoDecoders()->clear();
filter_collection->GetVideoDecoders()->push_back(video_decoder);
filter_collection->SetDemuxer(new media::DummyDemuxer(true, false));
......