Commit 956ee174 authored by mark a. foltz, committed by Commit Bot

[Code Health] Convert chromecast/media to base::BindOnce/BindRepeating.

Note this required converting chromecast::media::{AvPipelineClient,
VideoPipelineClient, MediaPipelineClient} to be move-only, as they now
own a base::OnceClosure.


Bug: 1007649
Change-Id: I4ff9d43085cd919e939093854d1e749ce43f83e7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2340510
Commit-Queue: mark a. foltz <mfoltz@chromium.org>
Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
Cr-Commit-Position: refs/heads/master@{#798051}
parent 5d46ca11
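
The conversion note above is the heart of this CL: a struct that owns a base::OnceClosure cannot be copyable, so the three client structs become move-only and are passed by value with std::move(). A minimal sketch of that pattern, using a hypothetical ExampleClient rather than the real structs, assuming Chromium //base headers:

```cpp
// Hypothetical client struct mirroring the move-only pattern in this CL.
#include <utility>

#include "base/callback.h"

struct ExampleClient {
  ExampleClient() = default;

  // base::OnceClosure is move-only, so copies are disallowed and moves
  // are defaulted (matching the AvPipelineClient changes below).
  ExampleClient(ExampleClient&&) = default;
  ExampleClient& operator=(ExampleClient&&) = default;
  ExampleClient(const ExampleClient&) = delete;
  ExampleClient& operator=(const ExampleClient&) = delete;

  // One-shot end-of-stream notification, consumed via
  // std::move(eos_cb).Run().
  base::OnceClosure eos_cb;

  // May fire many times, so it stays a repeating callback.
  base::RepeatingCallback<void(bool)> state_changed_cb;
};

// Consumers now take the client by value and move it along:
//   AudioPipelineImpl(CmaBackend::AudioDecoder* decoder, ExampleClient client)
//       : client_(std::move(client)) {}
```
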
......@@ -82,11 +82,11 @@ scoped_refptr<BufferingState> BufferingController::AddStream(
// Add a new stream to the list of streams being monitored.
scoped_refptr<BufferingState> buffering_state(new BufferingState(
stream_id,
config_,
base::Bind(&BufferingController::OnBufferingStateChanged, weak_this_,
false, false),
base::Bind(&BufferingController::UpdateHighLevelThreshold, weak_this_)));
stream_id, config_,
base::BindRepeating(&BufferingController::OnBufferingStateChanged,
weak_this_, false, false),
base::BindRepeating(&BufferingController::UpdateHighLevelThreshold,
weak_this_)));
stream_list_.push_back(buffering_state);
// Update the state and force a notification to the streams.
......
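
The BufferingController callbacks above are invoked on every threshold crossing, so they stay repeating. A sketch of the BindRepeating-plus-WeakPtr idiom used here (hypothetical Controller class, assuming //base):

```cpp
#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

class Controller {
 public:
  using BufferingNotificationCB = base::RepeatingCallback<void(bool)>;

  BufferingNotificationCB MakeStateChangedCallback() {
    // BindRepeating produces a callback that may Run() many times; binding a
    // WeakPtr makes each invocation a no-op once |this| is destroyed.
    return base::BindRepeating(&Controller::OnBufferingStateChanged,
                               weak_factory_.GetWeakPtr());
  }

 private:
  void OnBufferingStateChanged(bool is_buffering) {
    // React to each low/high-level transition.
  }

  base::WeakPtrFactory<Controller> weak_factory_{this};
};
```
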
......@@ -23,7 +23,7 @@ class BufferingState;
class BufferingController {
public:
typedef base::Callback<void(bool)> BufferingNotificationCB;
typedef base::RepeatingCallback<void(bool)> BufferingNotificationCB;
// Creates a buffering controller where the conditions to trigger rebuffering
// are given by |config|. The whole point of the buffering controller is to
......
......@@ -60,8 +60,9 @@ BufferingControllerTest::BufferingControllerTest() {
new BufferingConfig(low_level_threshold, high_level_threshold));
buffering_controller_.reset(new BufferingController(
buffering_config,
base::Bind(&MockBufferingControllerClient::OnBufferingNotification,
base::Unretained(&client_))));
base::BindRepeating(
&MockBufferingControllerClient::OnBufferingNotification,
base::Unretained(&client_))));
}
TEST_F(BufferingControllerTest, OneStream_Typical) {
......
......@@ -26,7 +26,8 @@ class DecoderBufferBase;
// Fetch some data from another CodedFrameProvider up to a certain size limit.
class BufferingFrameProvider : public CodedFrameProvider {
public:
typedef base::Callback<void(const scoped_refptr<DecoderBufferBase>&, bool)>
typedef base::RepeatingCallback<void(const scoped_refptr<DecoderBufferBase>&,
bool)>
FrameBufferedCB;
// Creates a frame provider that buffers coded frames up to the
......
......@@ -25,7 +25,7 @@ BufferingConfig::~BufferingConfig() {
BufferingState::BufferingState(const std::string& stream_id,
const scoped_refptr<BufferingConfig>& config,
const base::Closure& state_changed_cb,
const base::RepeatingClosure& state_changed_cb,
const HighLevelBufferCB& high_level_buffer_cb)
: stream_id_(stream_id),
config_(config),
......
......@@ -43,7 +43,7 @@ class BufferingConfig : public base::RefCountedThreadSafe<BufferingConfig> {
class BufferingState
: public base::RefCountedThreadSafe<BufferingState> {
public:
typedef base::Callback<void(base::TimeDelta)> HighLevelBufferCB;
typedef base::RepeatingCallback<void(base::TimeDelta)> HighLevelBufferCB;
enum State {
kLowLevel,
......@@ -59,7 +59,7 @@ class BufferingState
// the current high buffer level.
BufferingState(const std::string& stream_id,
const scoped_refptr<BufferingConfig>& config,
const base::Closure& state_changed_cb,
const base::RepeatingClosure& state_changed_cb,
const HighLevelBufferCB& high_level_buffer_cb);
// Returns the buffering state.
......@@ -110,7 +110,7 @@ class BufferingState
scoped_refptr<BufferingConfig> const config_;
// Callback invoked each time there is a change of state.
base::Closure state_changed_cb_;
base::RepeatingClosure state_changed_cb_;
// Callback invoked to adjust the high buffer level.
HighLevelBufferCB high_level_buffer_cb_;
......
......@@ -28,8 +28,8 @@ const size_t kMaxAudioFrameSize = 32 * 1024;
}
AudioPipelineImpl::AudioPipelineImpl(CmaBackend::AudioDecoder* decoder,
const AvPipelineClient& client)
: AvPipelineImpl(decoder, client), audio_decoder_(decoder) {
AvPipelineClient client)
: AvPipelineImpl(decoder, std::move(client)), audio_decoder_(decoder) {
DCHECK(audio_decoder_);
}
......
......@@ -26,8 +26,7 @@ class CodedFrameProvider;
class AudioPipelineImpl : public AvPipelineImpl {
public:
AudioPipelineImpl(CmaBackend::AudioDecoder* decoder,
const AvPipelineClient& client);
AudioPipelineImpl(CmaBackend::AudioDecoder* decoder, AvPipelineClient client);
~AudioPipelineImpl() override;
::media::PipelineStatus Initialize(
......
......@@ -91,7 +91,6 @@ class CastCdmContextForTest : public CastCdmContext {
private:
bool license_installed_;
base::Closure new_key_cb_;
::media::CallbackRegistry<::media::CdmContext::EventCB::RunType>
event_callbacks_;
......@@ -138,8 +137,8 @@ class PipelineHelper {
::media::CHANNEL_LAYOUT_STEREO, 44100, ::media::EmptyExtraData(),
::media::EncryptionScheme::kUnencrypted);
AvPipelineClient client;
client.eos_cb = base::Bind(&PipelineHelper::OnEos, base::Unretained(this),
STREAM_AUDIO);
client.eos_cb = base::BindOnce(&PipelineHelper::OnEos,
base::Unretained(this), STREAM_AUDIO);
EXPECT_CALL(*pipeline_backend_, CreateAudioDecoder())
.Times(1)
.WillOnce(Return(&audio_decoder_));
......@@ -147,7 +146,7 @@ class PipelineHelper {
.Times(1)
.WillOnce(SaveArg<0>(&audio_decoder_delegate_));
::media::PipelineStatus status = media_pipeline_->InitializeAudio(
audio_config, client, CreateFrameProvider());
audio_config, std::move(client), CreateFrameProvider());
ASSERT_EQ(::media::PIPELINE_OK, status);
}
if (have_video_) {
......@@ -159,7 +158,7 @@ class PipelineHelper {
gfx::Size(640, 480), gfx::Rect(0, 0, 640, 480), gfx::Size(640, 480),
::media::EmptyExtraData(), ::media::EncryptionScheme()));
VideoPipelineClient client;
client.av_pipeline_client.eos_cb = base::Bind(
client.av_pipeline_client.eos_cb = base::BindOnce(
&PipelineHelper::OnEos, base::Unretained(this), STREAM_VIDEO);
EXPECT_CALL(*pipeline_backend_, CreateVideoDecoder())
.Times(1)
......@@ -168,7 +167,7 @@ class PipelineHelper {
.Times(1)
.WillOnce(SaveArg<0>(&video_decoder_delegate_));
::media::PipelineStatus status = media_pipeline_->InitializeVideo(
video_configs, client, CreateFrameProvider());
video_configs, std::move(client), CreateFrameProvider());
ASSERT_EQ(::media::PIPELINE_OK, status);
}
}
......@@ -191,8 +190,8 @@ class PipelineHelper {
EXPECT_CALL(*pipeline_backend_, Pause());
}
void Start(const base::Closure& eos_cb) {
eos_cb_ = eos_cb;
void Start(base::OnceClosure eos_cb) {
eos_cb_ = std::move(eos_cb);
eos_[STREAM_AUDIO] = !media_pipeline_->HasAudio();
eos_[STREAM_VIDEO] = !media_pipeline_->HasVideo();
last_push_pts_[STREAM_AUDIO] = std::numeric_limits<int64_t>::min();
......@@ -211,9 +210,9 @@ class PipelineHelper {
media_pipeline_->SetPlaybackRate(1.0f);
}
void SetCdm() { media_pipeline_->SetCdm(cdm_context_.get()); }
void Flush(const base::Closure& flush_cb) {
void Flush(base::OnceClosure flush_cb) {
EXPECT_CALL(*pipeline_backend_, Stop()).Times(1);
media_pipeline_->Flush(flush_cb);
media_pipeline_->Flush(std::move(flush_cb));
}
void Stop() {
media_pipeline_.reset();
......@@ -254,7 +253,7 @@ class PipelineHelper {
void OnEos(Stream stream) {
eos_[stream] = true;
if (eos_[STREAM_AUDIO] && eos_[STREAM_VIDEO] && !eos_cb_.is_null())
eos_cb_.Run();
std::move(eos_cb_).Run();
}
bool have_audio_;
......@@ -262,7 +261,7 @@ class PipelineHelper {
bool encrypted_;
bool eos_[2];
int64_t last_push_pts_[2];
base::Closure eos_cb_;
base::OnceClosure eos_cb_;
std::unique_ptr<CastCdmContextForTest> cdm_context_;
MockCmaBackend* pipeline_backend_;
NiceMock<MockCmaBackend::AudioDecoder> audio_decoder_;
......@@ -307,13 +306,13 @@ static void VerifyPlay(PipelineHelper* pipeline_helper) {
}
TEST_P(AudioVideoPipelineImplTest, Play) {
base::Closure verify_task =
base::Bind(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
base::OnceClosure verify_task =
base::BindOnce(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
pipeline_helper_->SetPipelineStartExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()), verify_task));
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
std::move(verify_task)));
base::RunLoop().Run();
}
......@@ -332,33 +331,33 @@ static void VerifyNotReached() {
}
TEST_P(AudioVideoPipelineImplTest, Flush) {
base::Closure verify_task =
base::Bind(&VerifyFlush, base::Unretained(pipeline_helper_.get()));
base::OnceClosure verify_task =
base::BindOnce(&VerifyFlush, base::Unretained(pipeline_helper_.get()));
pipeline_helper_->SetPipelineStartFlushExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
base::Bind(&VerifyNotReached)));
base::BindOnce(&VerifyNotReached)));
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Flush,
base::Unretained(pipeline_helper_.get()), verify_task));
FROM_HERE, base::BindOnce(&PipelineHelper::Flush,
base::Unretained(pipeline_helper_.get()),
std::move(verify_task)));
base::RunLoop().Run();
}
TEST_P(AudioVideoPipelineImplTest, FullCycle) {
base::Closure stop_task = base::Bind(
base::OnceClosure stop_task = base::BindOnce(
&PipelineHelper::Stop, base::Unretained(pipeline_helper_.get()));
base::Closure eos_cb =
base::Bind(&PipelineHelper::Flush,
base::Unretained(pipeline_helper_.get()), stop_task);
base::OnceClosure eos_cb = base::BindOnce(
&PipelineHelper::Flush, base::Unretained(pipeline_helper_.get()),
std::move(stop_task));
pipeline_helper_->SetPipelineStartExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()), eos_cb));
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
std::move(eos_cb)));
base::RunLoop().Run();
}
......@@ -390,8 +389,8 @@ class EncryptedAVPipelineImplTest : public ::testing::Test {
// Sets a CDM with license already installed before starting the pipeline.
TEST_F(EncryptedAVPipelineImplTest, SetCdmWithLicenseBeforeStart) {
base::Closure verify_task =
base::Bind(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
base::OnceClosure verify_task =
base::BindOnce(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&PipelineHelper::SetCdm,
base::Unretained(pipeline_helper_.get())));
......@@ -400,21 +399,21 @@ TEST_F(EncryptedAVPipelineImplTest, SetCdmWithLicenseBeforeStart) {
base::Unretained(pipeline_helper_.get())));
pipeline_helper_->SetPipelineStartExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()), verify_task));
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
std::move(verify_task)));
base::RunLoop().Run();
}
// Start the pipeline, then set a CDM with existing license.
TEST_F(EncryptedAVPipelineImplTest, SetCdmWithLicenseAfterStart) {
base::Closure verify_task =
base::Bind(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
base::OnceClosure verify_task =
base::BindOnce(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
pipeline_helper_->SetPipelineStartExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()), verify_task));
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
std::move(verify_task)));
task_environment_.RunUntilIdle();
task_environment_.GetMainThreadTaskRunner()->PostTask(
......@@ -428,13 +427,13 @@ TEST_F(EncryptedAVPipelineImplTest, SetCdmWithLicenseAfterStart) {
// Start the pipeline, set a CDM, and then install the license.
TEST_F(EncryptedAVPipelineImplTest, SetCdmAndInstallLicenseAfterStart) {
base::Closure verify_task =
base::Bind(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
base::OnceClosure verify_task =
base::BindOnce(&VerifyPlay, base::Unretained(pipeline_helper_.get()));
pipeline_helper_->SetPipelineStartExpectations();
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()), verify_task));
FROM_HERE, base::BindOnce(&PipelineHelper::Start,
base::Unretained(pipeline_helper_.get()),
std::move(verify_task)));
task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&PipelineHelper::SetCdm,
base::Unretained(pipeline_helper_.get())));
......
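
The test changes above hand a base::OnceClosure to a posted task; the closure has to be moved into base::BindOnce and moved again when it is run. A small sketch of that shape (illustrative names, assumes a live task runner such as one provided by base::test::TaskEnvironment):

```cpp
#include <utility>

#include "base/bind.h"
#include "base/callback.h"
#include "base/location.h"
#include "base/threading/thread_task_runner_handle.h"

class PipelineHelperLike {
 public:
  // Takes ownership of the closure; runs it exactly once.
  void Start(base::OnceClosure eos_cb) { std::move(eos_cb).Run(); }
};

void PostStart(PipelineHelperLike* helper, base::OnceClosure verify_task) {
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      base::BindOnce(&PipelineHelperLike::Start, base::Unretained(helper),
                     std::move(verify_task)));
}
```
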
......@@ -10,7 +10,9 @@ namespace media {
AvPipelineClient::AvPipelineClient() {
}
AvPipelineClient::AvPipelineClient(const AvPipelineClient& other) = default;
AvPipelineClient::AvPipelineClient(AvPipelineClient&& other) = default;
AvPipelineClient& AvPipelineClient::operator=(AvPipelineClient&& other) =
default;
AvPipelineClient::~AvPipelineClient() {
}
......
......@@ -14,18 +14,22 @@ namespace chromecast {
namespace media {
struct AvPipelineClient {
typedef base::Callback<void(
base::TimeDelta, base::TimeDelta, base::TimeTicks)> TimeUpdateCB;
typedef base::RepeatingCallback<
void(base::TimeDelta, base::TimeDelta, base::TimeTicks)>
TimeUpdateCB;
AvPipelineClient();
AvPipelineClient(const AvPipelineClient& other);
AvPipelineClient(AvPipelineClient&& other);
AvPipelineClient(const AvPipelineClient& other) = delete;
AvPipelineClient& operator=(AvPipelineClient&& other);
AvPipelineClient& operator=(const AvPipelineClient& other) = delete;
~AvPipelineClient();
// Waiting status notification.
::media::WaitingCB waiting_cb;
// End of stream notification.
base::Closure eos_cb;
base::OnceClosure eos_cb;
// Asynchronous playback error notification.
::media::PipelineStatusCB playback_error_cb;
......
......@@ -30,10 +30,10 @@ namespace chromecast {
namespace media {
AvPipelineImpl::AvPipelineImpl(CmaBackend::Decoder* decoder,
const AvPipelineClient& client)
AvPipelineClient client)
: bytes_decoded_since_last_update_(0),
decoder_(decoder),
client_(client),
client_(std::move(client)),
state_(kUninitialized),
buffered_time_(::media::kNoTimestamp),
playable_buffered_time_(::media::kNoTimestamp),
......@@ -62,7 +62,7 @@ void AvPipelineImpl::SetCodedFrameProvider(
// Wrap the incoming frame provider to add some buffering capabilities.
frame_provider_.reset(new BufferingFrameProvider(
std::move(frame_provider), max_buffer_size, max_frame_size,
base::Bind(&AvPipelineImpl::OnDataBuffered, weak_this_)));
base::BindRepeating(&AvPipelineImpl::OnDataBuffered, weak_this_)));
}
bool AvPipelineImpl::StartPlayingFrom(
......@@ -96,7 +96,7 @@ bool AvPipelineImpl::StartPlayingFrom(
return true;
}
void AvPipelineImpl::Flush(const base::Closure& flush_cb) {
void AvPipelineImpl::Flush(base::OnceClosure flush_cb) {
LOG(INFO) << __FUNCTION__;
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(flush_cb_.is_null());
......@@ -108,7 +108,7 @@ void AvPipelineImpl::Flush(const base::Closure& flush_cb) {
DCHECK_EQ(state_, kPlaying);
set_state(kFlushing);
flush_cb_ = flush_cb;
flush_cb_ = std::move(flush_cb);
// Stop feeding the pipeline.
// Do not invalidate |pushed_buffer_| here since the backend may still be
// using it. Invalidate it in StartPlayingFrom on the assumption that
......@@ -135,7 +135,8 @@ void AvPipelineImpl::Flush(const base::Closure& flush_cb) {
// Reset |decryptor_| to flush buffered frames in |decryptor_|.
decryptor_.reset();
frame_provider_->Flush(base::Bind(&AvPipelineImpl::OnFlushDone, weak_this_));
frame_provider_->Flush(
base::BindOnce(&AvPipelineImpl::OnFlushDone, weak_this_));
}
void AvPipelineImpl::OnFlushDone() {
......@@ -172,7 +173,7 @@ void AvPipelineImpl::FetchBuffer() {
pending_read_ = true;
frame_provider_->Read(
base::Bind(&AvPipelineImpl::OnNewFrame, weak_this_));
base::BindOnce(&AvPipelineImpl::OnNewFrame, weak_this_));
}
void AvPipelineImpl::OnNewFrame(
......@@ -317,7 +318,7 @@ void AvPipelineImpl::OnPushBufferComplete(BufferStatus status) {
void AvPipelineImpl::OnEndOfStream() {
if (!client_.eos_cb.is_null())
client_.eos_cb.Run();
std::move(client_.eos_cb).Run();
}
void AvPipelineImpl::OnDecoderError() {
......
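
The Flush()/OnEndOfStream() changes above follow the standard one-shot consumption pattern: store the base::OnceClosure, then std::move(...).Run() it, which also leaves it null so it cannot fire twice. A stripped-down sketch (hypothetical Pipeline class):

```cpp
#include <utility>

#include "base/callback.h"
#include "base/logging.h"

class Pipeline {
 public:
  void Flush(base::OnceClosure flush_cb) {
    DCHECK(flush_cb_.is_null());
    flush_cb_ = std::move(flush_cb);
    // ... stop feeding the backend, then eventually OnFlushDone() ...
  }

  void OnFlushDone() {
    if (!flush_cb_.is_null())
      std::move(flush_cb_).Run();  // Leaves |flush_cb_| null afterwards.
  }

 private:
  base::OnceClosure flush_cb_;
};
```
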
......@@ -41,7 +41,7 @@ class DecoderBufferBase;
class AvPipelineImpl : CmaBackend::Decoder::Delegate {
public:
AvPipelineImpl(CmaBackend::Decoder* decoder, const AvPipelineClient& client);
AvPipelineImpl(CmaBackend::Decoder* decoder, AvPipelineClient client);
~AvPipelineImpl() override;
void SetCdm(CastCdmContext* cast_cdm_context);
......@@ -50,7 +50,7 @@ class AvPipelineImpl : CmaBackend::Decoder::Delegate {
// time, then start rendering samples.
bool StartPlayingFrom(base::TimeDelta time,
const scoped_refptr<BufferingState>& buffering_state);
void Flush(const base::Closure& flush_cb);
void Flush(base::OnceClosure flush_cb);
virtual void UpdateStatistics() = 0;
......@@ -137,10 +137,10 @@ class AvPipelineImpl : CmaBackend::Decoder::Delegate {
base::ThreadChecker thread_checker_;
CmaBackend::Decoder* const decoder_;
const AvPipelineClient client_;
AvPipelineClient client_;
// Callback provided to Flush().
base::Closure flush_cb_;
base::OnceClosure flush_cb_;
// AV pipeline state.
State state_;
......
......@@ -10,8 +10,10 @@ namespace media {
MediaPipelineClient::MediaPipelineClient() {
}
MediaPipelineClient::MediaPipelineClient(const MediaPipelineClient& other) =
default;
MediaPipelineClient::MediaPipelineClient(MediaPipelineClient&& other) = default;
MediaPipelineClient& MediaPipelineClient::operator=(
MediaPipelineClient&& other) = default;
MediaPipelineClient::~MediaPipelineClient() {
}
......
......@@ -14,15 +14,19 @@ namespace chromecast {
namespace media {
struct MediaPipelineClient {
typedef base::Callback<
void(base::TimeDelta, base::TimeDelta, base::TimeTicks)> TimeUpdateCB;
typedef base::RepeatingCallback<
void(base::TimeDelta, base::TimeDelta, base::TimeTicks)>
TimeUpdateCB;
MediaPipelineClient();
MediaPipelineClient(const MediaPipelineClient& other);
MediaPipelineClient(MediaPipelineClient&& other);
MediaPipelineClient(const MediaPipelineClient& other) = delete;
MediaPipelineClient& operator=(MediaPipelineClient&& other);
MediaPipelineClient& operator=(const MediaPipelineClient& other) = delete;
~MediaPipelineClient();
// Callback used to report a playback error as a ::media::PipelineStatus.
::media::PipelineStatusCB error_cb;
::media::PipelineStatusCallback error_cb;
// Callback used to report the latest playback time,
// as well as the maximum time available for rendering.
......
......@@ -132,15 +132,16 @@ void MediaPipelineImpl::Initialize(
new BufferingConfig(low_threshold, high_threshold));
buffering_controller_.reset(new BufferingController(
buffering_config,
base::Bind(&MediaPipelineImpl::OnBufferingNotification, weak_this_)));
base::BindRepeating(&MediaPipelineImpl::OnBufferingNotification,
weak_this_)));
}
}
void MediaPipelineImpl::SetClient(const MediaPipelineClient& client) {
void MediaPipelineImpl::SetClient(MediaPipelineClient client) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!client.error_cb.is_null());
DCHECK(!client.buffering_state_cb.is_null());
client_ = client;
client_ = std::move(client);
}
void MediaPipelineImpl::SetCdm(const base::UnguessableToken* cdm_id) {
......@@ -164,7 +165,7 @@ void MediaPipelineImpl::SetCdm(CastCdmContext* cdm_context) {
::media::PipelineStatus MediaPipelineImpl::InitializeAudio(
const ::media::AudioDecoderConfig& config,
const AvPipelineClient& client,
AvPipelineClient client,
std::unique_ptr<CodedFrameProvider> frame_provider) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!audio_decoder_);
......@@ -173,7 +174,8 @@ void MediaPipelineImpl::SetCdm(CastCdmContext* cdm_context) {
if (!audio_decoder_) {
return ::media::PIPELINE_ERROR_ABORT;
}
audio_pipeline_ = std::make_unique<AudioPipelineImpl>(audio_decoder_, client);
audio_pipeline_ =
std::make_unique<AudioPipelineImpl>(audio_decoder_, std::move(client));
if (cdm_context_)
audio_pipeline_->SetCdm(cdm_context_);
::media::PipelineStatus status =
......@@ -189,7 +191,7 @@ void MediaPipelineImpl::SetCdm(CastCdmContext* cdm_context) {
::media::PipelineStatus MediaPipelineImpl::InitializeVideo(
const std::vector<::media::VideoDecoderConfig>& configs,
const VideoPipelineClient& client,
VideoPipelineClient client,
std::unique_ptr<CodedFrameProvider> frame_provider) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!video_decoder_);
......@@ -198,7 +200,8 @@ void MediaPipelineImpl::SetCdm(CastCdmContext* cdm_context) {
if (!video_decoder_) {
return ::media::PIPELINE_ERROR_ABORT;
}
video_pipeline_.reset(new VideoPipelineImpl(video_decoder_, client));
video_pipeline_.reset(
new VideoPipelineImpl(video_decoder_, std::move(client)));
if (cdm_context_)
video_pipeline_->SetCdm(cdm_context_);
return video_pipeline_->Initialize(configs, std::move(frame_provider));
......@@ -287,11 +290,11 @@ void MediaPipelineImpl::Flush(base::OnceClosure flush_cb) {
pending_flush_task_->done_cb = std::move(flush_cb);
if (audio_pipeline_) {
audio_pipeline_->Flush(
base::Bind(&MediaPipelineImpl::OnFlushDone, weak_this_, true));
base::BindOnce(&MediaPipelineImpl::OnFlushDone, weak_this_, true));
}
if (video_pipeline_) {
video_pipeline_->Flush(
base::Bind(&MediaPipelineImpl::OnFlushDone, weak_this_, false));
base::BindOnce(&MediaPipelineImpl::OnFlushDone, weak_this_, false));
}
}
......@@ -578,7 +581,7 @@ void MediaPipelineImpl::OnError(::media::PipelineStatus error) {
"Cast.Platform.Error", error);
if (!client_.error_cb.is_null())
client_.error_cb.Run(error);
std::move(client_.error_cb).Run(error);
}
void MediaPipelineImpl::ResetBitrateState() {
......
......@@ -45,16 +45,16 @@ class MediaPipelineImpl {
void Initialize(LoadType load_type,
std::unique_ptr<CmaBackend> media_pipeline_backend);
void SetClient(const MediaPipelineClient& client);
void SetClient(MediaPipelineClient client);
void SetCdm(const base::UnguessableToken* cdm_id);
::media::PipelineStatus InitializeAudio(
const ::media::AudioDecoderConfig& config,
const AvPipelineClient& client,
AvPipelineClient client,
std::unique_ptr<CodedFrameProvider> frame_provider);
::media::PipelineStatus InitializeVideo(
const std::vector<::media::VideoDecoderConfig>& configs,
const VideoPipelineClient& client,
VideoPipelineClient client,
std::unique_ptr<CodedFrameProvider> frame_provider);
void StartPlayingFrom(base::TimeDelta time);
void Flush(base::OnceClosure flush_cb);
......
......@@ -10,8 +10,9 @@ namespace media {
VideoPipelineClient::VideoPipelineClient() {
}
VideoPipelineClient::VideoPipelineClient(const VideoPipelineClient& other) =
default;
VideoPipelineClient::VideoPipelineClient(VideoPipelineClient&& other) = default;
VideoPipelineClient& VideoPipelineClient::operator=(
VideoPipelineClient&& other) = default;
VideoPipelineClient::~VideoPipelineClient() {
}
......
......@@ -16,11 +16,14 @@ namespace chromecast {
namespace media {
struct VideoPipelineClient {
typedef base::Callback<void(
const gfx::Size& natural_size)> NaturalSizeChangedCB;
typedef base::RepeatingCallback<void(const gfx::Size& natural_size)>
NaturalSizeChangedCB;
VideoPipelineClient();
VideoPipelineClient(const VideoPipelineClient& other);
VideoPipelineClient(VideoPipelineClient&& other);
VideoPipelineClient(const VideoPipelineClient& other) = delete;
VideoPipelineClient& operator=(VideoPipelineClient&& other);
VideoPipelineClient& operator=(const VideoPipelineClient& other) = delete;
~VideoPipelineClient();
// All the default callbacks.
......
......@@ -28,8 +28,8 @@ const size_t kMaxVideoFrameSize = 1024 * 1024;
}
VideoPipelineImpl::VideoPipelineImpl(CmaBackend::VideoDecoder* decoder,
const VideoPipelineClient& client)
: AvPipelineImpl(decoder, client.av_pipeline_client),
VideoPipelineClient client)
: AvPipelineImpl(decoder, std::move(client.av_pipeline_client)),
video_decoder_(decoder),
natural_size_changed_cb_(client.natural_size_changed_cb) {
DCHECK(video_decoder_);
......
......@@ -28,7 +28,7 @@ class CodedFrameProvider;
class VideoPipelineImpl : public AvPipelineImpl {
public:
VideoPipelineImpl(CmaBackend::VideoDecoder* decoder,
const VideoPipelineClient& client);
VideoPipelineClient client);
~VideoPipelineImpl() override;
::media::PipelineStatus Initialize(
......
......@@ -273,7 +273,7 @@ void OnEncryptedMediaInitData(::media::EmeInitDataType init_data_type,
void OnMediaTracksUpdated(std::unique_ptr<::media::MediaTracks> tracks) {}
void OnNewBuffer(BufferList* buffer_list,
const base::Closure& finished_cb,
const base::RepeatingClosure& finished_cb,
::media::DemuxerStream::Status status,
scoped_refptr<::media::DecoderBuffer> buffer) {
CHECK_EQ(status, ::media::DemuxerStream::kOk);
......@@ -333,9 +333,9 @@ DemuxResult FFmpegDemuxForTest(const base::FilePath& filepath,
bool end_of_stream = false;
while (!end_of_stream) {
base::RunLoop run_loop;
stream->Read(base::Bind(&OnNewBuffer,
base::Unretained(&demux_result.frames),
run_loop.QuitClosure()));
stream->Read(base::BindOnce(&OnNewBuffer,
base::Unretained(&demux_result.frames),
run_loop.QuitClosure()));
run_loop.Run();
CHECK(!demux_result.frames.empty());
end_of_stream = demux_result.frames.back()->end_of_stream();
......
......@@ -42,15 +42,15 @@ namespace {
// for frames read from the DemuxerStream.
const base::TimeDelta kMaxDeltaFetcher(base::TimeDelta::FromMilliseconds(2000));
void VideoModeSwitchCompletionCb(const ::media::PipelineStatusCB& init_cb,
void VideoModeSwitchCompletionCb(::media::PipelineStatusCallback init_cb,
bool success) {
if (!success) {
LOG(ERROR) << "Video mode switch failed.";
init_cb.Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
std::move(init_cb).Run(::media::PIPELINE_ERROR_INITIALIZATION_FAILED);
return;
}
LOG(INFO) << "Video mode switched successfully.";
init_cb.Run(::media::PIPELINE_OK);
std::move(init_cb).Run(::media::PIPELINE_OK);
}
} // namespace
......@@ -213,11 +213,11 @@ void CastRenderer::OnGetMultiroomInfo(
// Create pipeline.
MediaPipelineClient pipeline_client;
pipeline_client.error_cb =
base::Bind(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
base::BindRepeating(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
pipeline_client.buffering_state_cb = base::BindRepeating(
&CastRenderer::OnBufferingStateChange, weak_factory_.GetWeakPtr());
pipeline_.reset(new MediaPipelineImpl());
pipeline_->SetClient(pipeline_client);
pipeline_->SetClient(std::move(pipeline_client));
pipeline_->Initialize(load_type, std::move(backend));
// TODO(servolk): Implement support for multiple streams. For now use the
......@@ -230,20 +230,20 @@ void CastRenderer::OnGetMultiroomInfo(
// Initialize audio.
if (audio_stream) {
AvPipelineClient audio_client;
audio_client.waiting_cb =
base::Bind(&CastRenderer::OnWaiting, weak_factory_.GetWeakPtr());
audio_client.eos_cb = base::Bind(&CastRenderer::OnEnded,
weak_factory_.GetWeakPtr(), STREAM_AUDIO);
audio_client.waiting_cb = base::BindRepeating(&CastRenderer::OnWaiting,
weak_factory_.GetWeakPtr());
audio_client.eos_cb = base::BindOnce(
&CastRenderer::OnEnded, weak_factory_.GetWeakPtr(), STREAM_AUDIO);
audio_client.playback_error_cb =
base::Bind(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
audio_client.statistics_cb = base::Bind(&CastRenderer::OnStatisticsUpdate,
weak_factory_.GetWeakPtr());
base::BindRepeating(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
audio_client.statistics_cb = base::BindRepeating(
&CastRenderer::OnStatisticsUpdate, weak_factory_.GetWeakPtr());
std::unique_ptr<CodedFrameProvider> frame_provider(new DemuxerStreamAdapter(
task_runner_, media_task_runner_factory_, audio_stream));
::media::PipelineStatus status =
pipeline_->InitializeAudio(audio_stream->audio_decoder_config(),
audio_client, std::move(frame_provider));
::media::PipelineStatus status = pipeline_->InitializeAudio(
audio_stream->audio_decoder_config(), std::move(audio_client),
std::move(frame_provider));
if (status != ::media::PIPELINE_OK) {
RunInitCallback(status);
return;
......@@ -254,15 +254,15 @@ void CastRenderer::OnGetMultiroomInfo(
// Initialize video.
if (video_stream) {
VideoPipelineClient video_client;
video_client.av_pipeline_client.waiting_cb =
base::Bind(&CastRenderer::OnWaiting, weak_factory_.GetWeakPtr());
video_client.av_pipeline_client.eos_cb = base::Bind(
video_client.av_pipeline_client.waiting_cb = base::BindRepeating(
&CastRenderer::OnWaiting, weak_factory_.GetWeakPtr());
video_client.av_pipeline_client.eos_cb = base::BindOnce(
&CastRenderer::OnEnded, weak_factory_.GetWeakPtr(), STREAM_VIDEO);
video_client.av_pipeline_client.playback_error_cb =
base::Bind(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
video_client.av_pipeline_client.statistics_cb = base::Bind(
base::BindRepeating(&CastRenderer::OnError, weak_factory_.GetWeakPtr());
video_client.av_pipeline_client.statistics_cb = base::BindRepeating(
&CastRenderer::OnStatisticsUpdate, weak_factory_.GetWeakPtr());
video_client.natural_size_changed_cb = base::Bind(
video_client.natural_size_changed_cb = base::BindRepeating(
&CastRenderer::OnVideoNaturalSizeChange, weak_factory_.GetWeakPtr());
// TODO(alokp): Change MediaPipelineImpl API to accept a single config
// after CmaRenderer is deprecated.
......@@ -272,7 +272,7 @@ void CastRenderer::OnGetMultiroomInfo(
task_runner_, media_task_runner_factory_, video_stream));
::media::PipelineStatus status = pipeline_->InitializeVideo(
video_configs, video_client, std::move(frame_provider));
video_configs, std::move(video_client), std::move(frame_provider));
if (status != ::media::PIPELINE_OK) {
RunInitCallback(status);
return;
......@@ -296,11 +296,11 @@ void CastRenderer::OnGetMultiroomInfo(
std::vector<::media::VideoDecoderConfig> video_configs;
video_configs.push_back(video_stream->video_decoder_config());
auto mode_switch_completion_cb =
base::Bind(&CastRenderer::OnVideoInitializationFinished,
weak_factory_.GetWeakPtr());
base::BindOnce(&CastRenderer::OnVideoInitializationFinished,
weak_factory_.GetWeakPtr());
video_mode_switcher_->SwitchMode(
video_configs, base::BindOnce(&VideoModeSwitchCompletionCb,
mode_switch_completion_cb));
std::move(mode_switch_completion_cb)));
} else if (video_stream) {
// No mode switch needed.
OnVideoInitializationFinished(::media::PIPELINE_OK);
......
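
The mode-switch change above binds one OnceCallback inside another; the inner callback must be std::move()'d into base::BindOnce, and the resulting callback is itself single-use. A small sketch with illustrative names:

```cpp
#include <utility>

#include "base/bind.h"
#include "base/callback.h"

void OnModeSwitchDone(base::OnceCallback<void(int)> init_cb, bool success) {
  // The bound callback arrives by value and is consumed exactly once.
  std::move(init_cb).Run(success ? 0 : -1);
}

base::OnceCallback<void(bool)> WrapInitCallback(
    base::OnceCallback<void(int)> init_cb) {
  // Move-only arguments must be moved into BindOnce; copying would not
  // compile.
  return base::BindOnce(&OnModeSwitchDone, std::move(init_cb));
}
```
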