Commit c6f271ca authored by Tiansong Cui, committed by Commit Bot

[Chromecast] Delay video based on delay of AudioOutputRedirector

Bug: internal 110279510
Test: manual
Change-Id: I52cc1ab552f6d405cf0eb15c79fa6ea88f51398d
Reviewed-on: https://chromium-review.googlesource.com/c/1372628
Commit-Queue: Tiansong Cui <tiansong@google.com>
Reviewed-by: Kenneth MacKay <kmackay@chromium.org>
Cr-Commit-Position: refs/heads/master@{#616034}
parent dc7d6b8d
@@ -182,6 +182,11 @@ void AudioDecoderForMixer::RestartPlaybackAt(int64_t timestamp, int64_t pts) {
  mixer_input_->RestartPlaybackAt(timestamp, pts);
}

+AudioDecoderForMixer::RenderingDelay
+AudioDecoderForMixer::GetMixerRenderingDelay() {
+  return mixer_input_->GetMixerRenderingDelay();
+}
+
void AudioDecoderForMixer::Stop() {
  TRACE_FUNCTION_ENTRY0();
  decoder_.reset();
...
@@ -63,6 +63,8 @@ class AudioDecoderForMixer : public MediaPipelineBackend::AudioDecoder,
  float SetAvSyncPlaybackRate(float rate);
  void RestartPlaybackAt(int64_t pts, int64_t timestamp);
+  RenderingDelay GetMixerRenderingDelay();
+
 private:
  friend class MockAudioDecoderForMixer;
  friend class AvSyncTest;
...
@@ -233,6 +233,12 @@ float BufferingMixerSource::SetAvSyncPlaybackRate(float rate) {
  return locked->audio_resampler_.SetMediaClockRate(rate);
}

+BufferingMixerSource::RenderingDelay
+BufferingMixerSource::GetMixerRenderingDelay() {
+  auto locked = locked_members_.Lock();
+  return locked->mixer_rendering_delay_;
+}
+
BufferingMixerSource::~BufferingMixerSource() {
  LOG(INFO) << "Destroy " << device_id_ << " (" << this << ")";
}
...
@@ -110,6 +110,10 @@ class BufferingMixerSource : public MixerInput::Source,
  // (supposedly) imperceptible.
  float SetAvSyncPlaybackRate(float rate);

+  // Returns the rendering delay from the mixer (ie, ignores any buffering in
+  // this class).
+  RenderingDelay GetMixerRenderingDelay();
+
 private:
  enum class State {
    kUninitialized,  // Not initialized by the mixer yet.
...
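The accessor added above returns only the delay reported by the mixer, ignoring audio still buffered inside BufferingMixerSource, and reads it under the same lock that protects the other mixer-updated members. Below is a minimal sketch of that locked-accessor pattern; the std::mutex, the stand-in RenderingDelay struct, and the OnMixerDelay() updater are simplified assumptions, not the real Chromecast types or locking helper.

#include <cstdint>
#include <mutex>

// Stand-in for the real RenderingDelay type (assumption: two int64 fields).
struct RenderingDelay {
  int64_t delay_microseconds = 0;      // Time until a newly pushed frame plays.
  int64_t timestamp_microseconds = 0;  // Clock time the delay was measured at.
};

// Sketch of a source that caches the mixer-reported delay behind a lock.
class BufferingSourceSketch {
 public:
  // Hypothetical update path: called when the mixer reports a new delay.
  void OnMixerDelay(const RenderingDelay& delay) {
    std::lock_guard<std::mutex> lock(lock_);
    mixer_rendering_delay_ = delay;
  }

  // Mirrors GetMixerRenderingDelay(): returns the mixer-side delay only,
  // ignoring any audio buffered inside this class.
  RenderingDelay GetMixerRenderingDelay() {
    std::lock_guard<std::mutex> lock(lock_);
    return mixer_rendering_delay_;
  }

 private:
  std::mutex lock_;
  RenderingDelay mixer_rendering_delay_;
};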
@@ -23,7 +23,11 @@
#endif  // defined(OS_FUCHSIA)

namespace {
-int64_t kSyncedPlaybackStartDelayUs = 50000;
+// Delay video playback to achieve AV sync when video starts.
+// This value is based on experimental calculation.
+int64_t kSyncedPlaybackStartDelayUs = 20000;
}  // namespace

namespace chromecast {
@@ -295,7 +299,8 @@ void MediaPipelineBackendForMixer::TryStartPlayback() {
  }

  start_playback_timestamp_us_ =
-      MonotonicClockNow() + kSyncedPlaybackStartDelayUs;
+      MonotonicClockNow() + kSyncedPlaybackStartDelayUs +
+      audio_decoder_->GetMixerRenderingDelay().delay_microseconds;

  LOG(INFO) << "Starting playback at=" << start_playback_timestamp_us_;
  video_decoder_->SetPts(start_playback_timestamp_us_, start_playback_pts_us_);
...
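With this hunk the video start timestamp now includes the mixer's rendering delay on top of the fixed start delay, so video is pushed out by however long the mixer (including any AudioOutputRedirector) will take to play the first audio sample. A minimal sketch of the computation with illustrative numbers; ComputeStartTimestampUs(), the clock value, and the 30 ms mixer delay are hypothetical, only the 20 ms kSyncedPlaybackStartDelayUs comes from the patch.

#include <cstdint>
#include <iostream>

namespace {
// Fixed start delay from the patch (20 ms).
constexpr int64_t kSyncedPlaybackStartDelayUs = 20000;
}  // namespace

// Hypothetical helper mirroring the updated TryStartPlayback() arithmetic.
int64_t ComputeStartTimestampUs(int64_t now_us, int64_t mixer_delay_us) {
  return now_us + kSyncedPlaybackStartDelayUs + mixer_delay_us;
}

int main() {
  const int64_t now_us = 1000000;        // Stand-in for MonotonicClockNow().
  const int64_t mixer_delay_us = 30000;  // Assumed mixer rendering delay.
  // Prints 1050000: playback starts 50 ms after "now" (20 ms fixed + 30 ms
  // mixer delay) instead of the previous fixed 50 ms constant.
  std::cout << ComputeStartTimestampUs(now_us, mixer_delay_us) << std::endl;
  return 0;
}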
@@ -124,9 +124,20 @@ void AvSyncVideo::UpkeepAvSync() {
    return;
  }

-  if (new_raw_apts != last_apts_value_recorded_) {
+  // If we remove an AudioRedirector with positive delay while video is playing,
+  // we will end up with new_apts_timestamp < last_apts_timestamp_. We ignore
+  // the audio frames in this gap to avoid DCHECK failure.
+  // Technically the linear regression can be 1 min long, so we may react late
+  // to the audio and video out of sync as the new samples have to offset the
+  // linear regression enough for the estimate values to start showing the
+  // difference. In practice this doesn't seem to happen, because we reset the
+  // linear regression often during AudioRateUpkeep, and hard corrections end up
+  // kicking in at the correct time.
+  if (new_raw_apts != last_apts_value_recorded_ &&
+      new_apts_timestamp > last_apts_timestamp_) {
    audio_pts_->AddSample(new_apts_timestamp, new_raw_apts, 1.0);
    last_apts_value_recorded_ = new_raw_apts;
+    last_apts_timestamp_ = new_apts_timestamp;
  }

  if (video_pts_->num_samples() < 10 || audio_pts_->num_samples() < 20) {
...
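The new condition skips APTS samples whose timestamp moved backwards, which can happen when an AudioOutputRedirector with positive delay is removed mid-playback; feeding such a sample to the regression would otherwise trip a DCHECK. A minimal sketch of just that skip-backwards guard; the AptsTracker class and its sample counter are hypothetical stand-ins for the real linear-regression bookkeeping.

#include <cstdint>

// Sketch of the guarded APTS sample bookkeeping from UpkeepAvSync().
class AptsTracker {
 public:
  // Returns true if the sample was accepted (i.e. it would have been fed to
  // the audio PTS linear regression).
  bool MaybeAddSample(int64_t new_apts_timestamp, int64_t new_raw_apts) {
    // Reject repeated APTS values and samples whose timestamp went backwards
    // (e.g. right after an AudioOutputRedirector with positive delay was
    // removed); only strictly newer samples advance the recorded state.
    if (new_raw_apts == last_apts_value_recorded_ ||
        new_apts_timestamp <= last_apts_timestamp_) {
      return false;
    }
    last_apts_value_recorded_ = new_raw_apts;
    last_apts_timestamp_ = new_apts_timestamp;
    ++num_samples_;
    return true;
  }

  int num_samples() const { return num_samples_; }

 private:
  int64_t last_apts_value_recorded_ = 0;
  int64_t last_apts_timestamp_ = 0;
  int num_samples_ = 0;
};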
@@ -105,6 +105,7 @@ class AvSyncVideo : public AvSync {
  int64_t last_dropped_frames_ = 0;
  int64_t last_vpts_value_recorded_ = 0;
  int64_t last_apts_value_recorded_ = 0;
+  int64_t last_apts_timestamp_ = 0;

  // Those are initialized to INT64_MIN as not to be confused with 0 timestamp
  // and 0 pts.
...