Commit d61b8a40 authored by Alex Leung, committed by Commit Bot

Disable audio prefetch when video is present.

* Rendering delay is not calculated correctly, since it does not
  account for data buffered in the backend.
* Fixes sync issues with GPM Lyrics.

This is a workaround until crbug/988535 is fixed.

Bug: b/138451922
Change-Id: Ic7891995522870c7de8eb0f3975406e76b078a4e
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1736805
Commit-Queue: Alex Leung <alexleung@google.com>
Reviewed-by: Yuchen Liu <yucliu@chromium.org>
Reviewed-by: John Rummell <jrummell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#690085}
parent 7fd091be
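The core of the change is how the delay passed to OnMoreData() is computed. The following is only an illustrative, standalone sketch of that logic (the struct, function, and parameter names are stand-ins, not the real CmaBackend::AudioDecoder::RenderingDelay API); it mirrors the cast_audio_output_stream hunk further down: with prefetch enabled, data already buffered in the backend is folded into the reported delay; with video present (prefetch off), only the backend-reported delay is used.

// Illustrative sketch only; plain structs stand in for the Chromium types.
#include <algorithm>
#include <cstdint>

struct RenderingDelay {
  int64_t delay_microseconds;      // Delay reported by the backend.
  int64_t timestamp_microseconds;  // Monotonic time the delay was measured at.
};

// Delay (in microseconds) to report to AudioSourceCallback::OnMoreData().
int64_t EffectiveDelayUs(const RenderingDelay& rd,
                         int64_t now_us,              // ~ MonotonicClockNow()
                         int64_t last_good_delay_us,  // last valid delay seen
                         bool audio_prefetch) {
  if (rd.delay_microseconds < 0 || rd.timestamp_microseconds < 0) {
    // Right after start/resume there is no good estimate yet; reuse the last one.
    return last_good_delay_us;
  }
  if (!audio_prefetch) {
    // Video present: report only the backend delay, otherwise video stutters.
    return rd.delay_microseconds;
  }
  // Audio-only (prefetch enabled): also count data already buffered in the
  // backend, measured against the current monotonic clock, clamped at zero.
  return std::max<int64_t>(
      0, rd.delay_microseconds + rd.timestamp_microseconds - now_us);
}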
@@ -98,6 +98,7 @@ buildflag_header("audio_buildflags") {
   header = "audio_buildflags.h"
   flags = [
+    "MEDIA_CLOCK_MONOTONIC_RAW=$media_clock_monotonic_raw",
     "MINIMUM_OUTPUT_BUFFER_SIZE_IN_FRAMES=$minimum_output_buffer_size_in_frames",
     "MAXIMUM_OUTPUT_BUFFER_SIZE_IN_FRAMES=$maximum_output_buffer_size_in_frames",
     "DEFAULT_OUTPUT_BUFFER_SIZE_IN_FRAMES=$default_output_buffer_size_in_frames",
...
@@ -20,6 +20,7 @@
 #include "chromecast/base/bind_to_task_runner.h"
 #include "chromecast/base/metrics/cast_metrics_helper.h"
 #include "chromecast/common/mojom/constants.mojom.h"
+#include "chromecast/media/audio/audio_buildflags.h"
 #include "chromecast/media/audio/cast_audio_manager.h"
 #include "chromecast/media/audio/mixer_service/mixer_service.pb.h"
 #include "chromecast/media/audio/mixer_service/mixer_service_connection.h"
@@ -51,7 +52,6 @@
 } while (0)
 namespace {
-const int64_t kInvalidTimestamp = std::numeric_limits<int64_t>::min();
 // Below are settings for MixerService and the DirectAudio it uses.
 constexpr base::TimeDelta kFadeTime = base::TimeDelta::FromMilliseconds(5);
 constexpr base::TimeDelta kMixerStartThreshold =
@@ -63,6 +63,16 @@ namespace chromecast {
 namespace media {
 namespace {
+int64_t MonotonicClockNow() {
+  timespec now = {0, 0};
+#if BUILDFLAG(MEDIA_CLOCK_MONOTONIC_RAW)
+  clock_gettime(CLOCK_MONOTONIC_RAW, &now);
+#else
+  clock_gettime(CLOCK_MONOTONIC, &now);
+#endif  // BUILDFLAG(MEDIA_CLOCK_MONOTONIC_RAW)
+  return static_cast<int64_t>(now.tv_sec) * 1000000 + now.tv_nsec / 1000;
+}
+
 AudioContentType GetContentType(const std::string& device_id) {
   if (::media::AudioDeviceDescription::IsCommunicationsDevice(device_id)) {
     return AudioContentType::kCommunication;
@@ -130,6 +140,8 @@ class CastAudioOutputStream::CmaWrapper : public CmaBackend::Decoder::Delegate {
   void OnVideoResolutionChanged(const Size& size) override {}
   void OnPushBufferComplete(BufferStatus status) override;
+
+  const bool is_audio_prefetch_;
   scoped_refptr<base::SingleThreadTaskRunner> audio_task_runner_;
   const ::media::AudioParameters audio_params_;
   const std::string device_id_;
@@ -145,7 +157,9 @@ class CastAudioOutputStream::CmaWrapper : public CmaBackend::Decoder::Delegate {
   base::OneShotTimer push_timer_;
   bool push_in_progress_;
   bool encountered_error_;
+  base::TimeTicks next_push_time_;
   base::TimeTicks last_push_complete_time_;
+  base::TimeDelta last_rendering_delay_;
   base::TimeDelta render_buffer_size_estimate_ = kRenderBufferSize;
   CmaBackend::AudioDecoder* audio_decoder_;
   AudioSourceCallback* source_callback_;
@@ -160,7 +174,9 @@ CastAudioOutputStream::CmaWrapper::CmaWrapper(
     const ::media::AudioParameters& audio_params,
     const std::string& device_id,
     CmaBackendFactory* cma_backend_factory)
-    : audio_task_runner_(audio_task_runner),
+    : is_audio_prefetch_(audio_params.effects() &
+                         ::media::AudioParameters::AUDIO_PREFETCH),
+      audio_task_runner_(audio_task_runner),
       audio_params_(audio_params),
       device_id_(device_id),
       cma_backend_factory_(cma_backend_factory),
@@ -172,6 +188,8 @@ CastAudioOutputStream::CmaWrapper::CmaWrapper(
   DCHECK(audio_task_runner_);
   DCHECK(cma_backend_factory_);
+  LOG(INFO) << "Enable audio prefetch: " << is_audio_prefetch_;
+
   // Set the default state.
   push_in_progress_ = false;
   encountered_error_ = false;
@@ -257,6 +275,7 @@ void CastAudioOutputStream::CmaWrapper::Start(
     cma_backend_->Start(0);
     render_buffer_size_estimate_ = kRenderBufferSize;
   }
+  next_push_time_ = base::TimeTicks::Now();
   last_push_complete_time_ = base::TimeTicks::Now();
   cma_backend_state_ = CmaBackendState::kStarted;
   media_thread_state_ = kStarted;
@@ -349,21 +368,34 @@ void CastAudioOutputStream::CmaWrapper::PushBuffer() {
       audio_decoder_->GetRenderingDelay();
   base::TimeDelta delay;
-  if (rendering_delay.delay_microseconds < 0) {
-    delay = base::TimeDelta();
+  if (rendering_delay.delay_microseconds < 0 ||
+      rendering_delay.timestamp_microseconds < 0) {
+    // This occurs immediately after start/resume when there isn't a good
+    // estimate of the buffer delay. Use the last known good delay.
+    delay = last_rendering_delay_;
   } else {
-    delay =
-        base::TimeDelta::FromMicroseconds(rendering_delay.delay_microseconds);
-  }
-
-  // This isn't actually used by audio_renderer_impl
-  base::TimeTicks delay_timestamp = base::TimeTicks();
-  if (rendering_delay.timestamp_microseconds != kInvalidTimestamp) {
-    delay_timestamp += base::TimeDelta::FromMicroseconds(
-        rendering_delay.timestamp_microseconds);
+    // The rendering delay to account for buffering is not included in
+    // rendering_delay.delay_microseconds but is in delay_timestamp which isn't
+    // used by AudioOutputStreamImpl.
+    if (is_audio_prefetch_) {
+      // Only account for this when prefetch is enabled or else video will
+      // stutter (b/123999757).
+      delay = base::TimeDelta::FromMicroseconds(
+          rendering_delay.delay_microseconds +
+          rendering_delay.timestamp_microseconds - MonotonicClockNow());
+      if (delay.InMicroseconds() < 0) {
+        delay = base::TimeDelta();
+      }
+    } else {
+      delay =
+          base::TimeDelta::FromMicroseconds(rendering_delay.delay_microseconds);
+    }
   }
+  last_rendering_delay_ = delay;
 
-  int frame_count =
-      source_callback_->OnMoreData(delay, delay_timestamp, 0, audio_bus_.get());
+  int frame_count = source_callback_->OnMoreData(delay, base::TimeTicks(), 0,
+                                                 audio_bus_.get());
   DVLOG(3) << "frames_filled=" << frame_count << " with latency=" << delay;
 
   if (frame_count == 0) {
@@ -406,11 +438,22 @@ void CastAudioOutputStream::CmaWrapper::OnPushBufferComplete(
   last_push_complete_time_ = now;
 
   base::TimeDelta delay;
-  if (render_buffer_size_estimate_ >= buffer_duration_) {
-    delay = base::TimeDelta::FromSeconds(0);
-  } else {
-    delay = buffer_duration_;
+  if (is_audio_prefetch_) {
+    // For multizone-playback, we don't care about AV sync and want to pre-fetch
+    // audio.
+    render_buffer_size_estimate_ -= buffer_duration_;
+    render_buffer_size_estimate_ += now - last_push_complete_time_;
+    last_push_complete_time_ = now;
+    if (render_buffer_size_estimate_ >= buffer_duration_) {
+      delay = base::TimeDelta::FromSeconds(0);
+    } else {
+      delay = buffer_duration_;
+    }
+  } else {
+    next_push_time_ = std::max(now, next_push_time_ + buffer_duration_);
+    delay = next_push_time_ - now;
   }
   DVLOG(3) << "render_buffer_size_estimate_=" << render_buffer_size_estimate_
            << " delay=" << delay << " buffer_duration_=" << buffer_duration_;
...
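A note on the pacing half of the hunk above: with prefetch on, the next push is scheduled from render_buffer_size_estimate_, which appears to track the remaining room in the render buffer, while without prefetch pushes are spaced one buffer duration apart in real time via next_push_time_. The following is only a rough standalone sketch of that decision, using plain microsecond integers in place of base::TimeTicks/base::TimeDelta (an assumption made for brevity):

// Illustrative pacing sketch; not the actual CmaWrapper code.
#include <algorithm>
#include <cstdint>

struct PushPacer {
  bool audio_prefetch = false;
  int64_t buffer_duration_us = 10000;     // Duration of one pushed buffer.
  int64_t render_buffer_estimate_us = 0;  // Prefetch path only.
  int64_t next_push_time_us = 0;          // Non-prefetch path only.
  int64_t last_push_complete_us = 0;

  // Returns how long to wait before pushing the next buffer.
  int64_t NextPushDelayUs(int64_t now_us) {
    if (audio_prefetch) {
      // The estimate shrinks by one buffer per push and grows with elapsed
      // time; push again immediately while at least one buffer's worth of
      // room seems to remain, otherwise wait one buffer duration.
      render_buffer_estimate_us -= buffer_duration_us;
      render_buffer_estimate_us += now_us - last_push_complete_us;
      last_push_complete_us = now_us;
      return render_buffer_estimate_us >= buffer_duration_us
                 ? 0
                 : buffer_duration_us;
    }
    // Without prefetch, pace pushes one buffer duration apart in real time so
    // audio is not delivered far ahead of video.
    next_push_time_us = std::max(now_us, next_push_time_us + buffer_duration_us);
    return next_push_time_us - now_us;
  }
};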
@@ -831,6 +831,29 @@ TEST_F(CastAudioOutputStreamTest, Volume) {
   stream->Close();
 }
 
+TEST_F(CastAudioOutputStreamTest, InvalidAudioDelay) {
+  ::media::AudioOutputStream* stream = CreateStream();
+  ASSERT_TRUE(stream);
+  ASSERT_TRUE(stream->Open());
+  RunThreadsUntilIdle();
+
+  FakeAudioDecoder* audio_decoder = GetAudioDecoder();
+  ASSERT_TRUE(audio_decoder);
+  audio_decoder->set_rendering_delay(
+      CmaBackend::AudioDecoder::RenderingDelay(-1, 0));
+
+  ::media::MockAudioSourceCallback source_callback;
+  const base::TimeDelta delay = base::TimeDelta();
+  EXPECT_CALL(source_callback, OnMoreData(delay, _, _, _))
+      .WillRepeatedly(Invoke(OnMoreData));
+  stream->Start(&source_callback);
+  RunThreadsUntilIdle();
+
+  stream->Stop();
+  stream->Close();
+}
+
 TEST_F(CastAudioOutputStreamTest, AudioDelay) {
   ::media::AudioOutputStream* stream = CreateStream();
   ASSERT_TRUE(stream);
@@ -844,9 +867,7 @@ TEST_F(CastAudioOutputStreamTest, AudioDelay) {
   ::media::MockAudioSourceCallback source_callback;
   const base::TimeDelta delay(base::TimeDelta::FromMicroseconds(kDelayUs));
-  const base::TimeTicks delay_timestamp(
-      base::TimeTicks() + base::TimeDelta::FromMicroseconds(kDelayTimestampUs));
-  EXPECT_CALL(source_callback, OnMoreData(delay, delay_timestamp, _, _))
+  EXPECT_CALL(source_callback, OnMoreData(delay, _, _, _))
       .WillRepeatedly(Invoke(OnMoreData));
   stream->Start(&source_callback);
...
...@@ -225,6 +225,7 @@ jumbo_source_set("base") { ...@@ -225,6 +225,7 @@ jumbo_source_set("base") {
"reentrancy_checker.h", "reentrancy_checker.h",
"renderer.cc", "renderer.cc",
"renderer.h", "renderer.h",
"renderer_client.cc",
"renderer_client.h", "renderer_client.h",
"renderer_factory.cc", "renderer_factory.cc",
"renderer_factory.h", "renderer_factory.h",
......
@@ -152,16 +152,17 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
   // effects should be enabled.
   enum PlatformEffectsMask {
     NO_EFFECTS = 0x0,
-    ECHO_CANCELLER = 0x1,
-    DUCKING = 0x2,  // Enables ducking if the OS supports it.
-    KEYBOARD_MIC = 0x4,
-    HOTWORD = 0x8,
-    NOISE_SUPPRESSION = 0x10,
-    AUTOMATIC_GAIN_CONTROL = 0x20,
-    EXPERIMENTAL_ECHO_CANCELLER = 0x40,  // Indicates an echo canceller is
-                                         // available that should only
-                                         // experimentally be enabled.
-    MULTIZONE = 0x80,
+    ECHO_CANCELLER = 1 << 0,
+    DUCKING = 1 << 1,  // Enables ducking if the OS supports it.
+    KEYBOARD_MIC = 1 << 2,
+    HOTWORD = 1 << 3,
+    NOISE_SUPPRESSION = 1 << 4,
+    AUTOMATIC_GAIN_CONTROL = 1 << 5,
+    EXPERIMENTAL_ECHO_CANCELLER = 1 << 6,  // Indicates an echo canceller is
+                                           // available that should only
+                                           // experimentally be enabled.
+    MULTIZONE = 1 << 7,
+    AUDIO_PREFETCH = 1 << 8,
   };
 
   struct HardwareCapabilities {
...
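AUDIO_PREFETCH rides on the existing effects() bitmask of ::media::AudioParameters, so no new plumbing is needed between the renderer and the output stream. The following is only a minimal standalone sketch of the producer/consumer handshake (FakeAudioParameters and video_stream_available are stand-ins; the real calls are in the audio_renderer_impl.cc and cast_audio_output_stream.cc hunks):

// Standalone sketch of passing AUDIO_PREFETCH through an effects bitmask.
#include <cstdint>
#include <iostream>

enum PlatformEffectsMask : uint32_t {
  NO_EFFECTS = 0,
  MULTIZONE = 1 << 7,
  AUDIO_PREFETCH = 1 << 8,  // The bit added by this change.
};

struct FakeAudioParameters {  // Stand-in for ::media::AudioParameters.
  uint32_t effects = NO_EFFECTS;
};

int main() {
  FakeAudioParameters params;
  bool video_stream_available = false;  // ~ RendererClient::IsVideoStreamAvailable()

  // Producer side (AudioRendererImpl in this change): enable prefetch only
  // when there is no video stream to keep in sync with.
  if (!video_stream_available)
    params.effects |= AUDIO_PREFETCH;

  // Consumer side (CastAudioOutputStream::CmaWrapper): test the bit.
  const bool is_audio_prefetch = params.effects & AUDIO_PREFETCH;
  std::cout << "audio prefetch enabled: " << is_audio_prefetch << "\n";
  return 0;
}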
@@ -290,6 +290,7 @@ class MockRendererClient : public RendererClient {
   MOCK_METHOD1(OnVideoOpacityChange, void(bool));
   MOCK_METHOD1(OnDurationChange, void(base::TimeDelta));
   MOCK_METHOD1(OnRemotePlayStateChange, void(MediaStatus::State state));
+  MOCK_METHOD0(IsVideoStreamAvailable, bool());
 };
 
 class MockVideoRenderer : public VideoRenderer {
...
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/base/renderer_client.h"
namespace media {
bool RendererClient::IsVideoStreamAvailable() {
return true;
}
} // namespace media
@@ -7,6 +7,7 @@
 #include "base/time/time.h"
 #include "media/base/audio_decoder_config.h"
+#include "media/base/buffering_state.h"
 #include "media/base/media_status.h"
 #include "media/base/pipeline_status.h"
 #include "media/base/video_decoder_config.h"
@@ -17,7 +18,7 @@ namespace media {
 // Interface used by Renderer, AudioRenderer, VideoRenderer and
 // MediaPlayerRenderer implementations to notify their clients.
-class RendererClient {
+class MEDIA_EXPORT RendererClient {
  public:
   // Executed if any error was encountered after Renderer initialization.
   virtual void OnError(PipelineStatus status) = 0;
@@ -48,6 +49,11 @@ class RendererClient {
   // Executed for the first video frame and whenever opacity changes.
   // Only used if media stream contains a video track.
   virtual void OnVideoOpacityChange(bool opaque) = 0;
+
+  // Returns true if video stream is available in the media resource.
+  // TODO(crbug.com/988535): Used by AudioRendererImpl. This can be removed
+  // when the bug is resolved.
+  virtual bool IsVideoStreamAvailable();
 };
 
 }  // namespace media
...
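Since IsVideoStreamAvailable() ships with a default implementation that returns true (the new renderer_client.cc above), existing RendererClient implementations keep working unchanged; only clients that can see the media resource need to override it, as RendererImpl does in the hunks below. The following is only a simplified, self-contained sketch of that override pattern (the Fake* types are stand-ins, not the real interfaces):

// Simplified stand-ins for the RendererClient / MediaResource interfaces.
class FakeMediaResource {
 public:
  explicit FakeMediaResource(bool has_video) : has_video_(has_video) {}
  // ~ GetFirstStream(DemuxerStream::VIDEO) != nullptr in the real API.
  bool HasVideoStream() const { return has_video_; }

 private:
  bool has_video_ = false;
};

class FakeRendererClient {
 public:
  virtual ~FakeRendererClient() = default;
  // Default mirrors the new media::RendererClient::IsVideoStreamAvailable().
  virtual bool IsVideoStreamAvailable() { return true; }
};

// Mirrors RendererImpl::RendererClientInternal in this change: report whether
// the media resource actually contains a video stream.
class InternalClient : public FakeRendererClient {
 public:
  explicit InternalClient(FakeMediaResource* media_resource)
      : media_resource_(media_resource) {}
  bool IsVideoStreamAvailable() override {
    return media_resource_->HasVideoStream();
  }

 private:
  FakeMediaResource* media_resource_;  // Not owned.
};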
@@ -558,6 +558,13 @@ void AudioRendererImpl::OnDeviceInfoReceived(
   audio_parameters_.set_latency_tag(AudioLatency::LATENCY_PLAYBACK);
 
+  if (!client_->IsVideoStreamAvailable()) {
+    // When video is not available, audio prefetch can be enabled. See
+    // crbug/988535.
+    audio_parameters_.set_effects(audio_parameters_.effects() |
+                                  ::media::AudioParameters::AUDIO_PREFETCH);
+  }
+
   last_decoded_channel_layout_ =
       stream->audio_decoder_config().channel_layout();
...
@@ -35,8 +35,10 @@ static const int kDefaultVideoUnderflowThresholdMs = 3000;
 class RendererImpl::RendererClientInternal final : public RendererClient {
  public:
-  RendererClientInternal(DemuxerStream::Type type, RendererImpl* renderer)
-      : type_(type), renderer_(renderer) {
+  RendererClientInternal(DemuxerStream::Type type,
+                         RendererImpl* renderer,
+                         MediaResource* media_resource)
+      : type_(type), renderer_(renderer), media_resource_(media_resource) {
     DCHECK((type_ == DemuxerStream::AUDIO) || (type_ == DemuxerStream::VIDEO));
   }
@@ -67,9 +69,14 @@ class RendererImpl::RendererClientInternal final : public RendererClient {
     renderer_->OnVideoOpacityChange(opaque);
   }
 
+  bool IsVideoStreamAvailable() override {
+    return media_resource_->GetFirstStream(::media::DemuxerStream::VIDEO);
+  }
+
  private:
   DemuxerStream::Type type_;
   RendererImpl* renderer_;
+  MediaResource* media_resource_;
 };
 
 RendererImpl::RendererImpl(
@@ -369,7 +376,7 @@ void RendererImpl::InitializeAudioRenderer() {
   current_audio_stream_ = audio_stream;
   audio_renderer_client_.reset(
-      new RendererClientInternal(DemuxerStream::AUDIO, this));
+      new RendererClientInternal(DemuxerStream::AUDIO, this, media_resource_));
   // Note: After the initialization of a renderer, error events from it may
   // happen at any time and all future calls must guard against STATE_ERROR.
   audio_renderer_->Initialize(audio_stream, cdm_context_,
@@ -420,7 +427,7 @@ void RendererImpl::InitializeVideoRenderer() {
   current_video_stream_ = video_stream;
   video_renderer_client_.reset(
-      new RendererClientInternal(DemuxerStream::VIDEO, this));
+      new RendererClientInternal(DemuxerStream::VIDEO, this, media_resource_));
   video_renderer_->Initialize(
       video_stream, cdm_context_, video_renderer_client_.get(),
       base::Bind(&RendererImpl::GetWallClockTimes, base::Unretained(this)),
...