Commit 86e750e4 authored by Alex Leung, committed by Commit Bot

[Chromecast] Fetch data faster than realtime in ChromecastAudioOutputStream.

* Add CastAudioDeviceFactory to use simple AudioOutputDevice; disables
mixer
* Use MakeAudioOutputStream instead of MakeAudioOutputStreamProxy;
prevents AudioOutputStream from closing
* Update CastAudioOutputStream Start/Stop/Flush logic.

Bug: b/124402185
Change-Id: Iabf4c06e7b2af40dd714d589713ac00880c25e89
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1625959
Reviewed-by: Yuchen Liu <yucliu@chromium.org>
Reviewed-by: Kinuko Yasuda <kinuko@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Commit-Queue: Alex Leung <alexleung@google.com>
Cr-Commit-Position: refs/heads/master@{#663984}
parent 65e4aac9
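Before the diff itself, a quick illustration of the pattern behind the first bullet: the renderer installs a content::AudioDeviceFactory override whose Create* methods either return a sink or return nullptr, which means "fall back to the default implementation" (see the "// Use default implementation." comments in cast_audio_device_factory.cc below). The sketch is self-contained and every type in it is invented for illustration; it is not Chromium code.

```cpp
// Minimal, self-contained sketch of the override-with-fallback shape used by
// CastAudioDeviceFactory in this CL. All names here (SinkFactory, SimpleSink,
// DefaultSink, NewSink) are illustrative only; they are not Chromium types.
#include <iostream>
#include <memory>
#include <string>

struct Sink {
  virtual ~Sink() = default;
  virtual std::string Name() const = 0;
};

struct SimpleSink : Sink {
  std::string Name() const override { return "direct output-device sink"; }
};

struct DefaultSink : Sink {
  std::string Name() const override { return "default (mixer) sink"; }
};

// Returning nullptr from a Create* method means "use the default path".
struct SinkFactory {
  virtual ~SinkFactory() = default;
  virtual std::unique_ptr<Sink> CreateSwitchableSink() { return nullptr; }
};

struct CastLikeFactory : SinkFactory {
  std::unique_ptr<Sink> CreateSwitchableSink() override {
    // The Cast factory hands back a plain output-device-backed sink,
    // bypassing the mixer path.
    return std::make_unique<SimpleSink>();
  }
};

std::unique_ptr<Sink> NewSink(SinkFactory* override_factory) {
  if (override_factory) {
    if (auto sink = override_factory->CreateSwitchableSink())
      return sink;                       // Override supplied a sink.
  }
  return std::make_unique<DefaultSink>();  // Fallback path.
}

int main() {
  CastLikeFactory factory;
  std::cout << NewSink(&factory)->Name() << "\n";  // direct output-device sink
  std::cout << NewSink(nullptr)->Name() << "\n";   // default (mixer) sink
}
```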
......@@ -5,6 +5,8 @@ include_rules = [
"+content/public/test/test_browser_thread.h",
"+content/public/test/test_browser_thread_bundle.h",
"+content/public/test/test_utils.h",
"+content/renderer/media/audio/audio_device_factory.h",
"+content/renderer/media/audio/audio_output_ipc_factory.h",
"+media/audio",
"+media/base",
"+media/cdm",
......
......@@ -49,6 +49,21 @@ cast_source_set("audio") {
configs += [ "//media/audio:platform_config" ]
}
if (is_android) {
cast_source_set("cast_audio_device_factory") {
sources = [
"cast_audio_device_factory.cc",
"cast_audio_device_factory.h",
]
deps = [
"//base",
"//content/renderer:renderer",
"//media",
]
}
}
cast_shared_library("libcast_external_audio_pipeline_1.0") {
sources = [
"external_audio_pipeline_dummy.cc",
......
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromecast/media/audio/cast_audio_device_factory.h"
#include <string>
#include "content/renderer/media/audio/audio_output_ipc_factory.h"
#include "media/audio/audio_output_device.h"
#include "media/base/audio_capturer_source.h"
#include "media/base/audio_parameters.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/output_device_info.h"
namespace chromecast {
namespace media {
class NonSwitchableAudioRendererSink
: public ::media::SwitchableAudioRendererSink {
public:
explicit NonSwitchableAudioRendererSink(
scoped_refptr<::media::AudioOutputDevice> output_device)
: output_device_(std::move(output_device)) {}
void Initialize(const ::media::AudioParameters& params,
RenderCallback* callback) override {
output_device_->Initialize(params, callback);
}
void Start() override { output_device_->Start(); }
void Stop() override { output_device_->Stop(); }
void Pause() override { output_device_->Pause(); }
void Play() override { output_device_->Play(); }
bool SetVolume(double volume) override {
return output_device_->SetVolume(volume);
}
::media::OutputDeviceInfo GetOutputDeviceInfo() override {
return output_device_->GetOutputDeviceInfo();
}
void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) override {
output_device_->GetOutputDeviceInfoAsync(std::move(info_cb));
}
bool IsOptimizedForHardwareParameters() override {
return output_device_->IsOptimizedForHardwareParameters();
}
bool CurrentThreadIsRenderingThread() override {
return output_device_->CurrentThreadIsRenderingThread();
}
void SwitchOutputDevice(const std::string& device_id,
::media::OutputDeviceStatusCB callback) override {
LOG(ERROR) << __func__ << " is not supported.";
std::move(callback).Run(::media::OUTPUT_DEVICE_STATUS_ERROR_INTERNAL);
}
void Flush() override { output_device_->Flush(); }
protected:
~NonSwitchableAudioRendererSink() override = default;
private:
scoped_refptr<::media::AudioOutputDevice> output_device_;
};
scoped_refptr<::media::AudioOutputDevice> NewOutputDevice(
int render_frame_id,
const ::media::AudioSinkParameters& params,
base::TimeDelta auth_timeout) {
auto device = base::MakeRefCounted<::media::AudioOutputDevice>(
content::AudioOutputIPCFactory::get()->CreateAudioOutputIPC(
render_frame_id),
content::AudioOutputIPCFactory::get()->io_task_runner(), params,
auth_timeout);
device->RequestDeviceAuthorization();
return device;
}
CastAudioDeviceFactory::CastAudioDeviceFactory()
: content::AudioDeviceFactory() {
DVLOG(1) << "Register CastAudioDeviceFactory";
}
CastAudioDeviceFactory::~CastAudioDeviceFactory() {
DVLOG(1) << "Unregister CastAudioDeviceFactory";
}
scoped_refptr<::media::AudioRendererSink>
CastAudioDeviceFactory::CreateFinalAudioRendererSink(
int render_frame_id,
const ::media::AudioSinkParameters& params,
base::TimeDelta auth_timeout) {
// Use default implementation.
return nullptr;
}
scoped_refptr<::media::AudioRendererSink>
CastAudioDeviceFactory::CreateAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) {
// Use default implementation.
return nullptr;
}
scoped_refptr<::media::SwitchableAudioRendererSink>
CastAudioDeviceFactory::CreateSwitchableAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) {
return base::MakeRefCounted<NonSwitchableAudioRendererSink>(NewOutputDevice(
render_frame_id, params, base::TimeDelta::FromSeconds(100)));
}
scoped_refptr<::media::AudioCapturerSource>
CastAudioDeviceFactory::CreateAudioCapturerSource(
int render_frame_id,
const ::media::AudioSourceParameters& params) {
// Use default implementation.
return nullptr;
}
} // namespace media
} // namespace chromecast
\ No newline at end of file
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMECAST_MEDIA_AUDIO_CAST_AUDIO_DEVICE_FACTORY_H_
#define CHROMECAST_MEDIA_AUDIO_CAST_AUDIO_DEVICE_FACTORY_H_
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "content/renderer/media/audio/audio_device_factory.h"
#include "media/audio/audio_sink_parameters.h"
namespace media {
class AudioCapturerSource;
class AudioRendererSink;
class SwitchableAudioRendererSink;
} // namespace media
namespace chromecast {
namespace media {
class CastAudioDeviceFactory : public content::AudioDeviceFactory {
public:
CastAudioDeviceFactory();
~CastAudioDeviceFactory() final;
scoped_refptr<::media::AudioRendererSink> CreateFinalAudioRendererSink(
int render_frame_id,
const ::media::AudioSinkParameters& params,
base::TimeDelta auth_timeout) override;
scoped_refptr<::media::AudioRendererSink> CreateAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) override;
scoped_refptr<::media::SwitchableAudioRendererSink>
CreateSwitchableAudioRendererSink(
content::AudioDeviceFactory::SourceType source_type,
int render_frame_id,
const ::media::AudioSinkParameters& params) override;
scoped_refptr<::media::AudioCapturerSource> CreateAudioCapturerSource(
int render_frame_id,
const ::media::AudioSourceParameters& params) override;
private:
DISALLOW_COPY_AND_ASSIGN(CastAudioDeviceFactory);
};
} // namespace media
} // namespace chromecast
#endif // CHROMECAST_MEDIA_AUDIO_CAST_AUDIO_DEVICE_FACTORY_H_
......@@ -298,5 +298,16 @@ CastAudioManager::GetMixerServiceConnectionFactoryForOutputStream(
return &mixer_service_connection_factory_;
}
#if defined(OS_ANDROID)
::media::AudioOutputStream* CastAudioManager::MakeAudioOutputStreamProxy(
const ::media::AudioParameters& params,
const std::string& device_id) {
// Override to use MakeAudioOutputStream to prevent the audio output stream
// from closing during pause/stop.
return MakeAudioOutputStream(params, device_id,
/*log_callback, not used*/ base::DoNothing());
}
#endif // defined(OS_ANDROID)
} // namespace media
} // namespace chromecast
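Per the comment in the override above (and the second commit-message bullet), routing MakeAudioOutputStreamProxy to MakeAudioOutputStream keeps the same CastAudioOutputStream alive across pause/stop, whereas the proxied path lets the stream be closed. The toy model below only restates that contrast; the classes are invented for illustration and do not describe Chromium's proxy internals.

```cpp
// Toy model (not Chromium code) of the lifetime difference this override is
// after: a proxied stream may be torn down when playback stops, while a
// directly created stream stays open across pause/resume.
#include <iostream>
#include <memory>

struct PhysicalStream {
  bool open = true;
  void Close() { open = false; }
};

// Proxy-style ownership: per this CL's description, stopping can end up
// closing the underlying physical stream.
struct ProxiedStream {
  std::unique_ptr<PhysicalStream> stream = std::make_unique<PhysicalStream>();
  void Stop() { stream->Close(); }
  bool IsOpen() const { return stream->open; }
};

// Direct ownership: Stop() only pauses; the physical stream survives.
struct DirectStream {
  PhysicalStream stream;
  void Stop() { /* pause only */ }
  bool IsOpen() const { return stream.open; }
};

int main() {
  ProxiedStream proxied;
  DirectStream direct;
  proxied.Stop();
  direct.Stop();
  std::cout << "proxied open after Stop: " << proxied.IsOpen() << "\n";  // 0
  std::cout << "direct open after Stop:  " << direct.IsOpen() << "\n";   // 1
}
```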
......@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "chromecast/media/audio/mixer_service/mixer_service_connection_factory.h"
#include "media/audio/audio_manager_base.h"
#include "services/service_manager/public/cpp/connector.h"
......@@ -108,6 +109,12 @@ class CastAudioManager : public ::media::AudioManagerBase {
virtual ::media::AudioOutputStream* MakeMixerOutputStream(
const ::media::AudioParameters& params);
#if defined(OS_ANDROID)
::media::AudioOutputStream* MakeAudioOutputStreamProxy(
const ::media::AudioParameters& params,
const std::string& device_id) override;
#endif
private:
friend class CastAudioMixer;
friend class CastAudioManagerTest;
......
......@@ -55,6 +55,7 @@ const int64_t kInvalidTimestamp = std::numeric_limits<int64_t>::min();
constexpr base::TimeDelta kFadeTime = base::TimeDelta::FromMilliseconds(5);
constexpr base::TimeDelta kMixerStartThreshold =
base::TimeDelta::FromMilliseconds(60);
constexpr base::TimeDelta kRenderBufferSize = base::TimeDelta::FromSeconds(4);
} // namespace
namespace chromecast {
......@@ -105,10 +106,18 @@ class CastAudioOutputStream::CmaWrapper : public CmaBackend::Decoder::Delegate {
chromecast::mojom::MultiroomInfoPtr multiroom_info);
void Start(AudioSourceCallback* source_callback);
void Stop(base::WaitableEvent* finished);
void Flush(base::WaitableEvent* finished);
void Close(base::OnceClosure closure);
void SetVolume(double volume);
private:
enum class CmaBackendState {
kUninitialized,
kStopped,
kPaused,
kStarted,
};
void PushBuffer();
// CmaBackend::Decoder::Delegate implementation:
......@@ -126,6 +135,7 @@ class CastAudioOutputStream::CmaWrapper : public CmaBackend::Decoder::Delegate {
CmaBackendFactory* const cma_backend_factory_;
AudioOutputState media_thread_state_;
CmaBackendState cma_backend_state_ = CmaBackendState::kUninitialized;
::media::AudioTimestampHelper timestamp_helper_;
const base::TimeDelta buffer_duration_;
std::unique_ptr<TaskRunnerImpl> cma_backend_task_runner_;
......@@ -134,7 +144,8 @@ class CastAudioOutputStream::CmaWrapper : public CmaBackend::Decoder::Delegate {
base::OneShotTimer push_timer_;
bool push_in_progress_;
bool encountered_error_;
base::TimeTicks next_push_time_;
base::TimeTicks last_push_complete_time_;
base::TimeDelta render_buffer_size_estimate_ = kRenderBufferSize;
CmaBackend::AudioDecoder* audio_decoder_;
AudioSourceCallback* source_callback_;
......@@ -154,7 +165,8 @@ CastAudioOutputStream::CmaWrapper::CmaWrapper(
cma_backend_factory_(cma_backend_factory),
media_thread_state_(kClosed),
timestamp_helper_(audio_params_.sample_rate()),
buffer_duration_(audio_params_.GetBufferDuration()) {
buffer_duration_(audio_params_.GetBufferDuration()),
render_buffer_size_estimate_(kRenderBufferSize) {
DETACH_FROM_THREAD(media_thread_checker_);
DCHECK(audio_task_runner_);
DCHECK(cma_backend_factory_);
......@@ -222,6 +234,7 @@ void CastAudioOutputStream::CmaWrapper::Initialize(
encountered_error_ = true;
return;
}
cma_backend_state_ = CmaBackendState::kStopped;
audio_bus_ = ::media::AudioBus::Create(audio_params_);
timestamp_helper_.SetBaseTimestamp(base::TimeDelta());
......@@ -241,11 +254,19 @@ void CastAudioOutputStream::CmaWrapper::Start(
}
if (media_thread_state_ == kOpened) {
DCHECK(cma_backend_state_ == CmaBackendState::kPaused ||
cma_backend_state_ == CmaBackendState::kStopped);
if (cma_backend_state_ == CmaBackendState::kPaused) {
cma_backend_->Resume();
} else {
cma_backend_->Start(0);
render_buffer_size_estimate_ = kRenderBufferSize;
}
last_push_complete_time_ = base::TimeTicks::Now();
cma_backend_state_ = CmaBackendState::kStarted;
media_thread_state_ = kStarted;
cma_backend_->Start(0);
}
next_push_time_ = base::TimeTicks::Now();
if (!push_in_progress_) {
push_in_progress_ = true;
PushBuffer();
......@@ -256,8 +277,30 @@ void CastAudioOutputStream::CmaWrapper::Stop(base::WaitableEvent* finished) {
DCHECK_CALLED_ON_VALID_THREAD(media_thread_checker_);
// Prevent further pushes to the audio buffer after stopping.
push_timer_.Stop();
if (cma_backend_ && media_thread_state_ == kStarted) {
cma_backend_->Stop();
// Don't actually stop the backend. Stop() gets called when the stream is
// paused. We rely on Flush() to stop the backend.
if (cma_backend_) {
cma_backend_->Pause();
cma_backend_state_ = CmaBackendState::kPaused;
}
push_in_progress_ = false;
media_thread_state_ = kOpened;
source_callback_ = nullptr;
finished->Signal();
}
void CastAudioOutputStream::CmaWrapper::Flush(base::WaitableEvent* finished) {
DCHECK_CALLED_ON_VALID_THREAD(media_thread_checker_);
// Prevent further pushes to the audio buffer after stopping.
push_timer_.Stop();
if (cma_backend_ &&
(media_thread_state_ == kStarted || media_thread_state_ == kOpened)) {
if (cma_backend_state_ == CmaBackendState::kPaused ||
cma_backend_state_ == CmaBackendState::kStarted) {
cma_backend_->Stop();
cma_backend_state_ = CmaBackendState::kStopped;
}
}
push_in_progress_ = false;
media_thread_state_ = kOpened;
......@@ -270,8 +313,9 @@ void CastAudioOutputStream::CmaWrapper::Close(base::OnceClosure closure) {
// Prevent further pushes to the audio buffer after stopping.
push_timer_.Stop();
// Only stop the backend if it was started.
if (cma_backend_ && media_thread_state_ == kStarted) {
if (cma_backend_ && cma_backend_state_ != CmaBackendState::kStopped) {
cma_backend_->Stop();
cma_backend_state_ = CmaBackendState::kStopped;
}
push_in_progress_ = false;
media_thread_state_ = kPendingClose;
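Taken together, the Start/Stop/Flush/Close changes above give the CMA backend a small explicit state machine: Initialize leaves it kStopped, Start either resumes (from kPaused) or starts fresh (from kStopped) and marks kStarted, Stop now only pauses, and Flush/Close are the paths that actually stop the backend. The standalone sketch below restates those transitions with a fake backend standing in for CmaBackend; it is an illustration, not the production code.

```cpp
// Standalone restatement (not Chromium code) of the CmaWrapper backend state
// transitions introduced above. FakeBackend stands in for CmaBackend.
#include <cassert>
#include <iostream>

enum class State { kUninitialized, kStopped, kPaused, kStarted };

struct FakeBackend {
  void Start()  { std::cout << "backend Start\n"; }
  void Resume() { std::cout << "backend Resume\n"; }
  void Pause()  { std::cout << "backend Pause\n"; }
  void Stop()   { std::cout << "backend Stop\n"; }
};

struct Wrapper {
  FakeBackend backend;
  State state = State::kUninitialized;

  void Initialize() { state = State::kStopped; }

  void Start() {
    assert(state == State::kStopped || state == State::kPaused);
    if (state == State::kPaused)
      backend.Resume();  // Resume after a pause (AudioOutputStream::Stop).
    else
      backend.Start();   // First start, or start again after Flush.
    state = State::kStarted;
  }

  // AudioOutputStream::Stop now only pauses; Flush is what stops the backend.
  void Stop() {
    backend.Pause();
    state = State::kPaused;
  }

  void Flush() {
    if (state == State::kPaused || state == State::kStarted) {
      backend.Stop();
      state = State::kStopped;
    }
  }

  void Close() {
    if (state != State::kStopped)
      backend.Stop();
    state = State::kStopped;
  }
};

int main() {
  Wrapper w;
  w.Initialize();
  w.Start();   // backend Start
  w.Stop();    // backend Pause (stream paused, backend kept alive)
  w.Start();   // backend Resume
  w.Flush();   // backend Stop
  w.Close();   // already stopped: no extra backend Stop
}
```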
......@@ -308,19 +352,21 @@ void CastAudioOutputStream::CmaWrapper::PushBuffer() {
CmaBackend::AudioDecoder::RenderingDelay rendering_delay =
audio_decoder_->GetRenderingDelay();
base::TimeDelta delay =
base::TimeDelta::FromMicroseconds(rendering_delay.delay_microseconds);
base::TimeTicks delay_timestamp =
base::TimeTicks() +
base::TimeDelta::FromMicroseconds(rendering_delay.timestamp_microseconds);
// The delay must be greater than zero, and if the timestamp is invalid, we
// cannot trust the current delay.
if (rendering_delay.timestamp_microseconds == kInvalidTimestamp ||
rendering_delay.delay_microseconds < 0) {
base::TimeDelta delay;
if (rendering_delay.delay_microseconds < 0) {
delay = base::TimeDelta();
} else {
delay =
base::TimeDelta::FromMicroseconds(rendering_delay.delay_microseconds);
}
// This isn't actually used by audio_renderer_impl.
base::TimeTicks delay_timestamp = base::TimeTicks();
if (rendering_delay.timestamp_microseconds != kInvalidTimestamp) {
delay_timestamp += base::TimeDelta::FromMicroseconds(
rendering_delay.timestamp_microseconds);
}
int frame_count =
source_callback_->OnMoreData(delay, delay_timestamp, 0, audio_bus_.get());
DVLOG(3) << "frames_filled=" << frame_count << " with latency=" << delay;
......@@ -357,9 +403,20 @@ void CastAudioOutputStream::CmaWrapper::OnPushBufferComplete(
// Schedule next push buffer.
const base::TimeTicks now = base::TimeTicks::Now();
next_push_time_ = std::max(now, next_push_time_ + buffer_duration_);
render_buffer_size_estimate_ -= buffer_duration_;
render_buffer_size_estimate_ += now - last_push_complete_time_;
last_push_complete_time_ = now;
base::TimeDelta delay;
if (render_buffer_size_estimate_ >= buffer_duration_) {
delay = base::TimeDelta::FromSeconds(0);
} else {
delay = buffer_duration_;
}
DVLOG(3) << "render_buffer_size_estimate_=" << render_buffer_size_estimate_
<< " delay=" << delay << " buffer_duration_=" << buffer_duration_;
base::TimeDelta delay = next_push_time_ - now;
push_timer_.Start(FROM_HERE, delay, this, &CmaWrapper::PushBuffer);
push_in_progress_ = true;
}
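The scheduling change above is the heart of "faster than realtime": each completed push subtracts one buffer_duration_ from the render_buffer_size_estimate_ budget and adds back the wall-clock time since the previous completion. While the estimate is at least one buffer long, the next PushBuffer is scheduled with zero delay (back to back); once the budget is spent, pushes fall back to one per buffer_duration_, i.e. realtime. The standalone simulation below restates that decision with made-up numbers (10 ms buffers, a 100 ms budget, instantaneous pushes) to show the transition; the real code uses audio_params_.GetBufferDuration() and the 4 s kRenderBufferSize.

```cpp
// Standalone simulation (not Chromium code) of the push scheduling above.
#include <cstdio>

int main() {
  const int buffer_ms = 10;        // buffer_duration_
  int estimate_ms = 100;           // render_buffer_size_estimate_
  int now_ms = 0;                  // fake wall clock
  int last_push_complete_ms = 0;   // last_push_complete_time_

  for (int push = 1; push <= 15; ++push) {
    // A push just completed at |now_ms| (pushes are assumed instantaneous).
    estimate_ms -= buffer_ms;                       // audio handed to the backend
    estimate_ms += now_ms - last_push_complete_ms;  // wall time that elapsed
    last_push_complete_ms = now_ms;

    // Same decision as OnPushBufferComplete: push again immediately while at
    // least one buffer of budget remains, otherwise wait one buffer duration.
    const int delay_ms = (estimate_ms >= buffer_ms) ? 0 : buffer_ms;
    std::printf("push %2d at t=%3d ms  estimate=%3d ms  next delay=%2d ms\n",
                push, now_ms, estimate_ms, delay_ms);
    now_ms += delay_ms;  // advance the clock to the next scheduled push
  }
  // The first 10 pushes fire back to back at t=0 (faster than realtime);
  // after that, pacing settles at one push per 10 ms (realtime).
}
```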
......@@ -386,6 +443,7 @@ class CastAudioOutputStream::MixerServiceWrapper
void Stop();
void Close(base::OnceClosure closure);
void SetVolume(double volume);
void Flush();
base::SingleThreadTaskRunner* io_task_runner() {
return io_task_runner_.get();
......@@ -484,6 +542,12 @@ void CastAudioOutputStream::MixerServiceWrapper::Stop() {
source_callback_ = nullptr;
}
void CastAudioOutputStream::MixerServiceWrapper::Flush() {
DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
// Nothing to do.
return;
}
void CastAudioOutputStream::MixerServiceWrapper::Close(
base::OnceClosure closure) {
DCHECK_CALLED_ON_VALID_THREAD(io_thread_checker_);
......@@ -692,7 +756,21 @@ void CastAudioOutputStream::Stop() {
void CastAudioOutputStream::Flush() {
DCHECK_CALLED_ON_VALID_THREAD(audio_thread_checker_);
DVLOG(2) << this << ": " << __func__;
// TODO(alexleung): Implement in follow-up CL.
// |cma_wrapper_| and |mixer_service_wrapper_| cannot be both active.
DCHECK(!(cma_wrapper_ && mixer_service_wrapper_));
if (cma_wrapper_) {
// Make sure this is not on the same thread as CMA_WRAPPER to prevent
// deadlock.
DCHECK(!audio_manager_->media_task_runner()->BelongsToCurrentThread());
base::WaitableEvent finished;
POST_TO_CMA_WRAPPER(Flush, base::Unretained(&finished));
finished.Wait();
} else if (mixer_service_wrapper_) {
POST_TO_MIXER_SERVICE_WRAPPER(Flush);
}
}
void CastAudioOutputStream::SetVolume(double volume) {
......
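CastAudioOutputStream::Flush() now blocks the audio thread until the CMA wrapper has flushed on the media thread, which is why the DCHECK above insists the two are different threads: waiting for an event that can only be signalled by a task queued behind the waiter would deadlock. Below is a minimal sketch of that post-and-wait pattern using plain std::thread and std::promise instead of Chromium's task runners and base::WaitableEvent; it illustrates the pattern only.

```cpp
// Sketch of the blocking-flush handshake with standard C++ primitives
// (illustration only, not Chromium code).
#include <future>
#include <iostream>
#include <thread>

void FlushOnMediaThread(std::promise<void> finished) {
  // ... tear down backend state on the media thread ...
  std::cout << "media thread: flushed\n";
  finished.set_value();  // equivalent of finished->Signal()
}

int main() {
  // "Audio thread" side: hand the flush to another thread and block until done.
  std::promise<void> finished;
  std::future<void> done = finished.get_future();
  std::thread media_thread(FlushOnMediaThread, std::move(finished));

  done.wait();  // equivalent of finished.Wait(); safe only because the work
                // runs on a different thread than the one waiting.
  std::cout << "audio thread: flush complete\n";
  media_thread.join();
}
```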
......@@ -12,6 +12,7 @@
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_checker.h"
#include "base/timer/timer.h"
#include "build/build_config.h"
#include "chromecast/base/task_runner_impl.h"
#include "chromecast/common/mojom/multiroom.mojom.h"
#include "chromecast/media/cma/backend/cma_backend.h"
......
......@@ -617,6 +617,10 @@ TEST_F(CastAudioOutputStreamTest, DeviceState) {
stream->Stop();
RunThreadsUntilIdle();
EXPECT_EQ(FakeCmaBackend::kStatePaused, cma_backend_->state());
stream->Flush();
RunThreadsUntilIdle();
EXPECT_EQ(FakeCmaBackend::kStateStopped, cma_backend_->state());
stream->Close();
......
......@@ -84,6 +84,8 @@ cast_source_set("renderer") {
"//chromecast/common/mojom",
"//mojo/public/cpp/bindings",
]
} else {
deps += [ "//chromecast/media/audio:cast_audio_device_factory" ]
}
if (enable_chromecast_extensions) {
......
......@@ -7,6 +7,7 @@
#include <utility>
#include "base/command_line.h"
#include "base/optional.h"
#include "base/strings/string_number_conversions.h"
#include "chromecast/base/bitstream_audio_codecs.h"
#include "chromecast/base/chromecast_switches.h"
......@@ -35,6 +36,7 @@
#include "third_party/blink/public/web/web_view.h"
#if defined(OS_ANDROID)
#include "chromecast/media/audio/cast_audio_device_factory.h"
#include "media/base/android/media_codec_util.h"
#else
#include "chromecast/renderer/memory_pressure_observer_impl.h"
......@@ -59,6 +61,18 @@
namespace chromecast {
namespace shell {
#if defined(OS_ANDROID)
// Audio renderer algorithm maximum capacity.
constexpr base::TimeDelta kAudioRendererMaxCapacity =
base::TimeDelta::FromSeconds(10);
// Audio renderer algorithm starting capacity. Configure large enough to
// prevent underrun.
constexpr base::TimeDelta kAudioRendererStartingCapacity =
base::TimeDelta::FromMilliseconds(5000);
constexpr base::TimeDelta kAudioRendererStartingCapacityEncrypted =
base::TimeDelta::FromMilliseconds(5500);
#endif // defined(OS_ANDROID)
CastContentRendererClient::CastContentRendererClient()
: supported_profiles_(new media::SupportedCodecProfileLevelsMemo()),
app_media_capabilities_observer_binding_(this),
......@@ -71,6 +85,10 @@ CastContentRendererClient::CastContentRendererClient()
// instance, which caches the platform decoder supported state when it is
// constructed.
::media::EnablePlatformDecoderSupport();
// Register a custom content::AudioDeviceFactory.
cast_audio_device_factory_ =
std::make_unique<media::CastAudioDeviceFactory>();
#endif // OS_ANDROID
}
......@@ -310,7 +328,16 @@ CastContentRendererClient::CreateURLLoaderThrottleProvider(
base::Optional<::media::AudioRendererAlgorithmParameters>
CastContentRendererClient::GetAudioRendererAlgorithmParameters(
::media::AudioParameters audio_parameters) {
#if defined(OS_ANDROID)
::media::AudioRendererAlgorithmParameters parameters;
parameters.max_capacity = kAudioRendererMaxCapacity;
parameters.starting_capacity = kAudioRendererStartingCapacity;
parameters.starting_capacity_for_encrypted =
kAudioRendererStartingCapacityEncrypted;
return base::Optional<::media::AudioRendererAlgorithmParameters>(parameters);
#else
return base::nullopt;
#endif
}
} // namespace shell
......
......@@ -32,6 +32,10 @@ class MemoryPressureObserverImpl;
namespace media {
class MediaCapsObserverImpl;
class SupportedCodecProfileLevelsMemo;
#if defined(OS_ANDROID)
class CastAudioDeviceFactory;
#endif // defined(OS_ANDROID)
}
namespace shell {
......@@ -106,6 +110,10 @@ class CastContentRendererClient
guest_view_container_dispatcher_;
#endif
#if defined(OS_ANDROID)
std::unique_ptr<media::CastAudioDeviceFactory> cast_audio_device_factory_;
#endif
int supported_bitstream_audio_codecs_;
DISALLOW_COPY_AND_ASSIGN(CastContentRendererClient);
......
......@@ -28,6 +28,7 @@ target(link_target_type, "renderer") {
"//content/app:*",
"//content/public/renderer:renderer_sources",
"//content/renderer:audio_decoder_fuzzer",
"//chromecast/media/audio:*",
]
sources = [
......