Commit 4de0dd39 authored by Mina Almasry's avatar Mina Almasry Committed by Commit Bot

[Chromecast] Add initial pause/resume/seek logic

Improve the initial implementation by adding proper pause/resume/seek support.

The AudioDecoderForMixer will notify the AvSync of these actions. The
AvSync will turn on/off its AV monitoring as needed, and will reset its
linear regressions as needed.

BUG=internal 73746352
TEST=On device, pause/play now work smoothly, with no jitter noticeable
to me. Correction logs indicate that we do a hard correction after a
seek and usually a soft correction after a resume, but neither produces
any visible jitter.

Change-Id: I21376e41eee15161098578208625974ff0385cf8
Reviewed-on: https://chromium-review.googlesource.com/947720
Commit-Queue: Mina Almasry <almasrymina@chromium.org>
Reviewed-by: default avatarKenneth MacKay <kmackay@chromium.org>
Reviewed-by: default avatarSergey Volk <servolk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#541742}
parent 8364e297
......@@ -132,6 +132,7 @@ bool AudioDecoderForMixer::Start(int64_t start_pts) {
if (!rate_shifter_) {
CreateRateShifter(config_.samples_per_second);
}
av_sync_->NotifyStart();
return true;
}
......@@ -141,6 +142,7 @@ void AudioDecoderForMixer::Stop() {
mixer_input_.reset();
rate_shifter_.reset();
weak_factory_.InvalidateWeakPtrs();
av_sync_->NotifyStop();
Initialize();
}
......@@ -150,6 +152,7 @@ bool AudioDecoderForMixer::Pause() {
DCHECK(mixer_input_);
mixer_input_->SetPaused(true);
paused_pts_ = GetCurrentPts();
av_sync_->NotifyPause();
return true;
}
......@@ -158,6 +161,7 @@ bool AudioDecoderForMixer::Resume() {
DCHECK(mixer_input_);
paused_pts_ = kInvalidTimestamp;
mixer_input_->SetPaused(false);
av_sync_->NotifyResume();
return true;
}
......
......@@ -19,6 +19,16 @@ namespace media {
class MediaPipelineBackendForMixer;
// Interface to an AV sync module. This AV sync treats the audio as master and
// syncs the video to it, while attempting to minimize jitter in the video. It
// is typically owned by the audio decoder, but it may be owned by any
// component willing to notify it about the state of the audio playback as
// below.
//
// Whichever component owns this one should include and depend on this
// interface rather than the implementation header file. This makes it
// possible for someone in the future to provide their own implementation of
// this class by statically linking in their own definition of the
// AvSync::Create factory method declared below.
class AvSync {
public:
static std::unique_ptr<AvSync> Create(
......@@ -27,9 +37,36 @@ class AvSync {
virtual ~AvSync() = default;
// Notify that an audio buffer has been pushed to the mixer, and what was the
// rendering delay corresponding to this audio buffer. The AV sync code may
// choose to use this information however it pleases, but typically it would
// use it to understand what is the audio PTS at any moment, and use this
// information to sync the video accordingly.
virtual void NotifyAudioBufferPushed(
int64_t buffer_timestamp,
MediaPipelineBackend::AudioDecoder::RenderingDelay delay) = 0;
// Notify that the audio playback has been started. The AV sync will typically
// start upkeeping AV sync. The AV sync code is *not* responsible for
// starting the video.
// TODO(almasrymina): consider actually changing AV sync's responsibilities
// to pause/resume/stop/start playback.
virtual void NotifyStart() = 0;
// Notify that the audio playback has been stopped. The AV sync will typically
// stop upkeeping AV sync. The AV sync code is *not* responsible for stopping
// the video.
virtual void NotifyStop() = 0;
// Notify that the audio playback has been paused. The AV sync code will
// typically stop upkeeping AV sync until the audio playback is resumed again.
// The AV sync code is *not* responsible for pausing the video.
virtual void NotifyPause() = 0;
// Notify that the audio playback has been resumed. The AV sync code will
// typically start upkeeping AV sync again after this is called. The AV sync
// code is *not* responsible for resuming the video.
virtual void NotifyResume() = 0;
};
} // namespace media
......
......@@ -17,6 +17,10 @@ class AvSyncDummy : public AvSync {
void NotifyAudioBufferPushed(
int64_t buffer_timestamp,
MediaPipelineBackend::AudioDecoder::RenderingDelay delay) override;
void NotifyStart() override;
void NotifyStop() override;
void NotifyPause() override;
void NotifyResume() override;
};
std::unique_ptr<AvSync> AvSync::Create(
......@@ -31,5 +35,13 @@ void AvSyncDummy::NotifyAudioBufferPushed(
int64_t buffer_timestamp,
MediaPipelineBackend::AudioDecoder::RenderingDelay delay) {}
// AvSyncDummy performs no AV synchronization, so every playback-state
// notification is intentionally a no-op.
void AvSyncDummy::NotifyStart() {}
void AvSyncDummy::NotifyStop() {}
void AvSyncDummy::NotifyPause() {}
void AvSyncDummy::NotifyResume() {}
} // namespace media
} // namespace chromecast
......@@ -58,10 +58,8 @@ bool MediaPipelineBackendForMixer::Initialize() {
bool MediaPipelineBackendForMixer::Start(int64_t start_pts) {
DCHECK_EQ(kStateInitialized, state_);
if (audio_decoder_ && !audio_decoder_->Start(start_pts))
return false;
if (video_decoder_ && !video_decoder_->Start(start_pts, true))
return false;
......@@ -84,7 +82,8 @@ bool MediaPipelineBackendForMixer::Pause() {
DCHECK_EQ(kStatePlaying, state_);
if (audio_decoder_ && !audio_decoder_->Pause())
return false;
// TODO(almasrymina): Implement pause/resume.
if (video_decoder_ && !video_decoder_->Pause())
return false;
state_ = kStatePaused;
return true;
......@@ -94,7 +93,8 @@ bool MediaPipelineBackendForMixer::Resume() {
DCHECK_EQ(kStatePaused, state_);
if (audio_decoder_ && !audio_decoder_->Resume())
return false;
// TODO(almasrymina): Implement pause/resume.
if (video_decoder_ && !video_decoder_->Resume())
return false;
state_ = kStatePlaying;
return true;
......
......@@ -66,27 +66,8 @@ void AvSyncVideo::NotifyAudioBufferPushed(
buffer_timestamp == INT64_MAX)
return;
int64_t absolute_ts = delay.delay_microseconds + delay.timestamp_microseconds;
audio_pts_->AddSample(delay.timestamp_microseconds,
buffer_timestamp - (delay.delay_microseconds), 1.0);
if (!setup_video_clock_ && backend_->video_decoder()) {
// TODO(almasrymina): If we don't have a valid delay at the start of
// playback, we should push silence to the mixer to get a valid delay
// before we start content playback.
LOG(INFO) << "Got valid delay. buffer_timestamp=" << buffer_timestamp
<< " delay.delay_microseconds=" << delay.delay_microseconds
<< " delay.timestamp_microseconds="
<< delay.timestamp_microseconds;
backend_->video_decoder()->SetCurrentPts(
((int64_t)buffer_timestamp) -
(absolute_ts - backend_->MonotonicClockNow()));
setup_video_clock_ = true;
timer_.Start(FROM_HERE, kAvSyncUpkeepInterval, this,
&AvSyncVideo::UpkeepAvSync);
}
}
// TODO(almasrymina): this code is the core of the av sync logic, and the
......@@ -99,13 +80,29 @@ void AvSyncVideo::NotifyAudioBufferPushed(
// - Current requirements for number of samples in the linear regression is
// arbitrary.
void AvSyncVideo::UpkeepAvSync() {
DCHECK(setup_video_clock_);
if (!backend_->video_decoder()) {
VLOG(4) << "No video decoder available.";
return;
}
int64_t now = backend_->MonotonicClockNow(); // 'now'...
int64_t current_apts;
double error;
if (!setup_video_clock_) {
// TODO(almasrymina): If we don't have a valid delay at the start of
// playback, we should push silence to the mixer to get a valid delay
// before we start content playback.
if (audio_pts_->num_samples() > 1) {
audio_pts_->EstimateY(now, &current_apts, &error);
LOG(INFO) << "Setting up video clock. current_apts=" << current_apts;
backend_->video_decoder()->SetCurrentPts(current_apts);
setup_video_clock_ = true;
}
return;
}
video_pts_->AddSample(now, backend_->video_decoder()->GetCurrentPts(), 1.0);
......@@ -117,8 +114,6 @@ void AvSyncVideo::UpkeepAvSync() {
}
int64_t current_vpts;
int64_t current_apts;
double error;
double vpts_slope;
double apts_slope;
video_pts_->EstimateY(now, &current_vpts, &error);
......@@ -187,6 +182,35 @@ void AvSyncVideo::UpkeepAvSync() {
}
}
void AvSyncVideo::StopAvSync() {
audio_pts_.reset(
new WeightedMovingLinearRegression(kLinearRegressionDataLifetimeUs));
video_pts_.reset(
new WeightedMovingLinearRegression(kLinearRegressionDataLifetimeUs));
error_.reset(
new WeightedMovingLinearRegression(kLinearRegressionDataLifetimeUs));
timer_.Stop();
}
// Audio playback started: begin periodic AV sync upkeep. Per the AvSync
// contract, this does not start audio or video playback itself; it only
// drives UpkeepAvSync() on each timer tick.
void AvSyncVideo::NotifyStart() {
  timer_.Start(FROM_HERE, kAvSyncUpkeepInterval, this,
               &AvSyncVideo::UpkeepAvSync);
}

// Audio playback stopped: halt upkeep and forget the video clock setup, so
// a subsequent start re-establishes the video clock from scratch.
void AvSyncVideo::NotifyStop() {
  StopAvSync();
  setup_video_clock_ = false;
}

// Audio playback paused: halt upkeep. Unlike NotifyStop(), the video clock
// setup is retained (setup_video_clock_ stays true) so resuming does not
// redo the clock setup.
void AvSyncVideo::NotifyPause() {
  StopAvSync();
}

// Audio playback resumed: restarting upkeep is identical to the initial
// start, so delegate to NotifyStart() rather than duplicating the timer
// setup.
void AvSyncVideo::NotifyResume() {
  NotifyStart();
}

AvSyncVideo::~AvSyncVideo() = default;
} // namespace media
......
......@@ -32,9 +32,14 @@ class AvSyncVideo : public AvSync {
void NotifyAudioBufferPushed(
int64_t buffer_timestamp,
MediaPipelineBackend::AudioDecoder::RenderingDelay delay) override;
void NotifyStart() override;
void NotifyStop() override;
void NotifyPause() override;
void NotifyResume() override;
private:
void UpkeepAvSync();
void StopAvSync();
base::RepeatingTimer timer_;
bool setup_video_clock_ = false;
......
......@@ -44,10 +44,12 @@ class VideoDecoderForMixer : public MediaPipelineBackend::VideoDecoder {
// Stop playback.
virtual void Stop() = 0;
// Pause playback.
// Pause playback. The video decoder must retain its playback rate after
// resume.
virtual bool Pause() = 0;
// Resume playback.
// Resume playback. The video decoder must resume playback at the same
// playback rate prior to pausing.
virtual bool Resume() = 0;
// Get the current video PTS. This will typically be the pts of the last
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment