Commit 37ae45f4 authored by Ken MacKay's avatar Ken MacKay Committed by Commit Bot

[Chromecast] Add public API for direct audio playback

BUG= internal b/71559266

Change-Id: Ia7d00e82bca0a50385c58b853b4cba05670e0dde
Reviewed-on: https://chromium-review.googlesource.com/937812
Commit-Queue: Kenneth MacKay <kmackay@chromium.org>
Reviewed-by: default avatarLuke Halliwell <halliwell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#540671}
parent 46b3460f
...@@ -117,16 +117,28 @@ cast_source_set("public") { ...@@ -117,16 +117,28 @@ cast_source_set("public") {
] ]
} }
cast_source_set("audio_helpers") {
sources = [
"audio_fader.cc",
"audio_fader.h",
]
deps = [
"//base",
"//media",
]
}
cast_source_set("for_mixer_audio") { cast_source_set("for_mixer_audio") {
sources = [ sources = [
"audio_decoder_for_mixer.cc", "audio_decoder_for_mixer.cc",
"audio_decoder_for_mixer.h", "audio_decoder_for_mixer.h",
"audio_fader.cc",
"audio_fader.h",
"buffering_mixer_source.cc", "buffering_mixer_source.cc",
"buffering_mixer_source.h", "buffering_mixer_source.h",
"cast_audio_json.cc", "cast_audio_json.cc",
"cast_audio_json.h", "cast_audio_json.h",
"direct_mixer_source.cc",
"direct_mixer_source.h",
"filter_group.cc", "filter_group.cc",
"filter_group.h", "filter_group.h",
"media_codec_support_cast_audio.cc", "media_codec_support_cast_audio.cc",
...@@ -150,6 +162,7 @@ cast_source_set("for_mixer_audio") { ...@@ -150,6 +162,7 @@ cast_source_set("for_mixer_audio") {
deps = [ deps = [
":audio_buildflags", ":audio_buildflags",
":audio_helpers",
":null", ":null",
":public", ":public",
"//base", "//base",
...@@ -183,6 +196,7 @@ test("cast_audio_backend_unittests") { ...@@ -183,6 +196,7 @@ test("cast_audio_backend_unittests") {
] ]
deps = [ deps = [
":audio_helpers",
":for_mixer_audio", ":for_mixer_audio",
":public", ":public",
"//base", "//base",
......
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromecast/media/cma/backend/direct_mixer_source.h"
#include "base/logging.h"
#include "chromecast/media/cma/backend/stream_mixer.h"
#include "chromecast/public/media/direct_audio_source.h"
#include "chromecast/public/media/media_pipeline_device_params.h"
#include "media/base/audio_bus.h"
namespace chromecast {
namespace media {
namespace {

// Direct sources are always mixed into stereo output.
const int kNumOutputChannels = 2;

// Returns a human-readable name for |type|, used only for logging. Any type
// other than kAlarm or kCommunication is reported as "media".
std::string AudioContentTypeToString(media::AudioContentType type) {
  if (type == media::AudioContentType::kAlarm)
    return "alarm";
  if (type == media::AudioContentType::kCommunication)
    return "communication";
  return "media";
}

}  // namespace
// static
// Public entry point (declared in chromecast/public/cast_media_shlib.h).
// Wraps |source| in a DirectMixerSource, whose constructor registers it with
// the StreamMixer. The DirectMixerSource itself is returned as the opaque
// DirectAudioSourceToken that must later be passed to
// RemoveDirectAudioSource().
// NOTE(review): the public API header documents a nullptr return when a
// source cannot be added, but this implementation always returns a new token.
DirectAudioSourceToken* CastMediaShlib::AddDirectAudioSource(
DirectAudioSource* source,
const MediaPipelineDeviceParams& params,
int source_sample_rate,
int playout_channel) {
DCHECK(source);
return new DirectMixerSource(source, params, source_sample_rate,
playout_channel);
}
// static
// Public entry point: begins asynchronous removal of the source from the
// mixer. |token| is the DirectMixerSource returned by AddDirectAudioSource().
// The object is NOT deleted here; it deletes itself in
// FinalizeAudioPlayback() once the mixer has released it.
void CastMediaShlib::RemoveDirectAudioSource(DirectAudioSourceToken* token) {
DirectMixerSource* source = static_cast<DirectMixerSource*>(token);
source->Remove();
}
// Adapts the public |direct_source| to the MixerInput::Source API and
// registers it with the global StreamMixer. Output is always stereo
// (kNumOutputChannels). |params| supplies the device ID, content type, and
// stream type: any type other than sound effects is treated as primary.
// Fix: the original body DCHECKed |source_| twice; keep a single check.
DirectMixerSource::DirectMixerSource(DirectAudioSource* direct_source,
                                     const MediaPipelineDeviceParams& params,
                                     int source_sample_rate,
                                     int playout_channel)
    : source_(direct_source),
      num_channels_(kNumOutputChannels),
      input_samples_per_second_(source_sample_rate),
      primary_(params.audio_type !=
               MediaPipelineDeviceParams::kAudioStreamSoundEffects),
      device_id_(params.device_id),
      content_type_(params.content_type),
      playout_channel_(playout_channel),
      mixer_(StreamMixer::Get()),
      channel_vector_(num_channels_) {
  DCHECK(source_);
  DCHECK(mixer_);
  LOG(INFO) << "Create " << device_id_ << " (" << this
            << "), content type = " << AudioContentTypeToString(content_type_);
  mixer_->AddInput(this);
}
// Private destructor: instances self-delete in FinalizeAudioPlayback() after
// the mixer has released them; callers must use Remove(), never delete.
DirectMixerSource::~DirectMixerSource() {
LOG(INFO) << "Destroy " << device_id_ << " (" << this << ")";
}
// MixerInput::Source implementation: trivial accessors for values captured at
// construction. All backing members are const, so these never change.
int DirectMixerSource::num_channels() {
return num_channels_;
}
int DirectMixerSource::input_samples_per_second() {
return input_samples_per_second_;
}
// True unless the stream was created as a sound-effects stream.
bool DirectMixerSource::primary() {
return primary_;
}
const std::string& DirectMixerSource::device_id() {
return device_id_;
}
AudioContentType DirectMixerSource::content_type() {
return content_type_;
}
// The preferred fill size is delegated to the wrapped public source.
int DirectMixerSource::desired_read_size() {
return source_->GetDesiredFillSize();
}
int DirectMixerSource::playout_channel() {
return playout_channel_;
}
// Sets this stream's volume multiplier by delegating to the mixer. Per the
// header comment, multipliers < 0 are treated as 0 (clamping is presumably
// done by the mixer — not visible here).
void DirectMixerSource::SetVolumeMultiplier(float multiplier) {
mixer_->SetVolumeMultiplier(this, multiplier);
}
// MixerInput::Source: called once the mixer has accepted this input. Relays
// the actual per-fill |read_size| and the initial delay estimate to the
// wrapped public source.
void DirectMixerSource::InitializeAudioPlayback(
int read_size,
RenderingDelay initial_rendering_delay) {
source_->InitializeAudioPlayback(read_size, initial_rendering_delay);
}
// MixerInput::Source: called by the mixer to pull more audio. Repackages the
// AudioBus channel pointers into the plain float* vector that the public
// DirectAudioSource API expects (planar float), then forwards the request.
// Returns the number of frames the source actually filled.
int DirectMixerSource::FillAudioPlaybackFrames(int num_frames,
                                               RenderingDelay rendering_delay,
                                               ::media::AudioBus* buffer) {
  DCHECK(buffer);
  DCHECK_EQ(num_channels_, buffer->channels());
  DCHECK_GE(buffer->frames(), num_frames);

  // |channel_vector_| was sized to |num_channels_| at construction; refresh
  // its pointers for this buffer.
  int channel = 0;
  for (float*& slot : channel_vector_) {
    slot = buffer->channel(channel);
    ++channel;
  }
  return source_->FillAudioPlaybackFrames(num_frames, rendering_delay,
                                          channel_vector_);
}
// MixerInput::Source: playback error. kInputIgnored gets a special log line
// since it means the input was dropped due to an output sample rate change
// rather than a hard failure; the error is forwarded to the wrapped source
// in every case.
void DirectMixerSource::OnAudioPlaybackError(MixerError error) {
if (error == MixerError::kInputIgnored) {
LOG(INFO) << "Mixer input " << device_id_ << " (" << this << ")"
<< " now being ignored due to output sample rate change";
}
source_->OnAudioPlaybackError();
}
// Begins asynchronous removal from the mixer. Once the mixer is done with
// this input it calls FinalizeAudioPlayback(), which performs the deletion.
void DirectMixerSource::Remove() {
LOG(INFO) << "Remove " << device_id_ << " (" << this << ")";
mixer_->RemoveInput(this);
}
// MixerInput::Source: final callback from the mixer. Notifies the wrapped
// source that playback is complete, then self-deletes. Nothing may touch
// member state after |delete this|.
void DirectMixerSource::FinalizeAudioPlayback() {
source_->OnAudioPlaybackComplete();
delete this;
}
} // namespace media
} // namespace chromecast
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMECAST_MEDIA_CMA_BACKEND_DIRECT_MIXER_SOURCE_H_
#define CHROMECAST_MEDIA_CMA_BACKEND_DIRECT_MIXER_SOURCE_H_
#include <string>
#include <vector>
#include "base/macros.h"
#include "chromecast/media/cma/backend/mixer_input.h"
#include "chromecast/public/media/media_pipeline_backend.h"
#include "chromecast/public/volume_control.h"
namespace media {
class AudioBus;
} // namespace media
namespace chromecast {
namespace media {
class DirectAudioSource;
struct MediaPipelineDeviceParams;
class StreamMixer;
// Empty interface so we can use a pointer to DirectMixerSource as the token.
// Returned by CastMediaShlib::AddDirectAudioSource() and later passed back to
// RemoveDirectAudioSource(); callers treat it as opaque.
class DirectAudioSourceToken {};
// A simple adapter for DirectAudioSource to map the public API to the
// MixerInput::Source API.
// Lifetime: the constructor adds |this| to the StreamMixer; Remove() starts
// asynchronous removal, and the mixer's final FinalizeAudioPlayback()
// callback deletes the object (the destructor is private for that reason).
class DirectMixerSource : public MixerInput::Source,
public DirectAudioSourceToken {
public:
using RenderingDelay = MediaPipelineBackend::AudioDecoder::RenderingDelay;
// |direct_source| must outlive this adapter (until its
// OnAudioPlaybackComplete() is called). |params| provides the device ID,
// content type, and stream (primary vs sound-effects) classification.
DirectMixerSource(DirectAudioSource* direct_source,
const MediaPipelineDeviceParams& params,
int source_sample_rate,
int playout_channel);
// Sets the volume multiplier for this stream. If |multiplier| < 0, sets the
// volume multiplier to 0.
void SetVolumeMultiplier(float multiplier);
// Removes this source from the mixer asynchronously. After this method is
// called, no more calls will be made to delegate methods. The source will
// be removed from the mixer once it has faded out appropriately.
void Remove();
private:
// Private: the object self-deletes in FinalizeAudioPlayback().
~DirectMixerSource() override;
// MixerInput::Source implementation:
int num_channels() override;
int input_samples_per_second() override;
bool primary() override;
const std::string& device_id() override;
AudioContentType content_type() override;
int desired_read_size() override;
int playout_channel() override;
void InitializeAudioPlayback(int read_size,
RenderingDelay initial_rendering_delay) override;
int FillAudioPlaybackFrames(int num_frames,
RenderingDelay rendering_delay,
::media::AudioBus* buffer) override;
void OnAudioPlaybackError(MixerError error) override;
void FinalizeAudioPlayback() override;
// The wrapped public source; not owned.
DirectAudioSource* const source_;
const int num_channels_;
const int input_samples_per_second_;
const bool primary_;
const std::string device_id_;
const AudioContentType content_type_;
const int playout_channel_;
// Global mixer singleton; not owned.
StreamMixer* const mixer_;
// Scratch buffer of per-channel data pointers, refilled on every
// FillAudioPlaybackFrames() call.
std::vector<float*> channel_vector_;
DISALLOW_COPY_AND_ASSIGN(DirectMixerSource);
};
} // namespace media
} // namespace chromecast
#endif // CHROMECAST_MEDIA_CMA_BACKEND_DIRECT_MIXER_SOURCE_H_
...@@ -160,6 +160,14 @@ void MediaPipelineBackendManager::LogicalResume(MediaPipelineBackend* backend) { ...@@ -160,6 +160,14 @@ void MediaPipelineBackendManager::LogicalResume(MediaPipelineBackend* backend) {
wrapper->LogicalResume(); wrapper->LogicalResume();
} }
void MediaPipelineBackendManager::AddExtraPlayingStream(bool sfx) {
UpdatePlayingAudioCount(sfx, 1);
}
void MediaPipelineBackendManager::RemoveExtraPlayingStream(bool sfx) {
UpdatePlayingAudioCount(sfx, -1);
}
void MediaPipelineBackendManager::SetGlobalVolumeMultiplier( void MediaPipelineBackendManager::SetGlobalVolumeMultiplier(
AudioContentType type, AudioContentType type,
float multiplier) { float multiplier) {
......
...@@ -104,7 +104,14 @@ class MediaPipelineBackendManager { ...@@ -104,7 +104,14 @@ class MediaPipelineBackendManager {
void LogicalPause(MediaPipelineBackend* backend); void LogicalPause(MediaPipelineBackend* backend);
void LogicalResume(MediaPipelineBackend* backend); void LogicalResume(MediaPipelineBackend* backend);
// Sets a global multiplier for output volume for streams fo the given |type|. // Add/remove a playing audio stream that is not accounted for by a
// MediaPipelineBackend instance (for example, direct audio output using
// CastMediaShlib::AddDirectAudioSource()). |sfx| indicates whether or not
// the stream is a sound effects stream (has no effect on volume feedback).
void AddExtraPlayingStream(bool sfx);
void RemoveExtraPlayingStream(bool sfx);
// Sets a global multiplier for output volume for streams of the given |type|.
// The multiplier may be any value >= 0; if the resulting volume for an // The multiplier may be any value >= 0; if the resulting volume for an
// individual stream would be > 1.0, that stream's volume is clamped to 1.0. // individual stream would be > 1.0, that stream's volume is clamped to 1.0.
// The default multiplier is 1.0. May be called on any thread. // The default multiplier is 1.0. May be called on any thread.
......
...@@ -17,6 +17,8 @@ namespace media { ...@@ -17,6 +17,8 @@ namespace media {
enum SampleFormat : int; enum SampleFormat : int;
class DirectAudioSource;
class DirectAudioSourceToken;
class MediaPipelineBackend; class MediaPipelineBackend;
struct MediaPipelineDeviceParams; struct MediaPipelineDeviceParams;
class VideoPlane; class VideoPlane;
...@@ -132,6 +134,24 @@ class CHROMECAST_EXPORT CastMediaShlib { ...@@ -132,6 +134,24 @@ class CHROMECAST_EXPORT CastMediaShlib {
static bool SetVideoPlaneImage(int width, int height, const uint8_t* data) static bool SetVideoPlaneImage(int width, int height, const uint8_t* data)
__attribute__((__weak__)); __attribute__((__weak__));
static void ClearVideoPlaneImage() __attribute__((__weak__)); static void ClearVideoPlaneImage() __attribute__((__weak__));
// Sets up a direct audio source for output. The media backend will pull audio
// directly from |source| whenever more output data is needed; this provides
// low-latency output. The source must remain valid until
// OnAudioPlaybackComplete() has been called on it.
// Returns nullptr if a direct source cannot be added. Otherwise, returns a
// token that must be passed to RemoveDirectAudioSource() to remove the source
// when desired.
static DirectAudioSourceToken* AddDirectAudioSource(
DirectAudioSource* source,
const MediaPipelineDeviceParams& params,
int source_sample_rate,
int playout_channel) __attribute__((__weak__));
// Removes a direct audio source, given the |token| that was returned by
// AddDirectAudioSource().
static void RemoveDirectAudioSource(DirectAudioSourceToken* token)
__attribute__((__weak__));
}; };
} // namespace media } // namespace media
......
...@@ -12,6 +12,7 @@ cast_source_set("media") { ...@@ -12,6 +12,7 @@ cast_source_set("media") {
"cast_decrypt_config.h", "cast_decrypt_config.h",
"cast_key_system.h", "cast_key_system.h",
"decoder_config.h", "decoder_config.h",
"direct_audio_source.h",
"media_pipeline_backend.h", "media_pipeline_backend.h",
"media_pipeline_device_params.h", "media_pipeline_device_params.h",
"stream_id.h", "stream_id.h",
......
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMECAST_PUBLIC_MEDIA_DIRECT_AUDIO_SOURCE_H_
#define CHROMECAST_PUBLIC_MEDIA_DIRECT_AUDIO_SOURCE_H_
#include <vector>
#include "media_pipeline_backend.h"
namespace chromecast {
namespace media {
// Direct audio source for the backend, used for low-latency audio output.
// All methods are called from a high-priority thread that is internal to the
// media backend, and therefore all methods should be threadsafe. Once a direct
// audio source has been added to the backend, it must not be deleted until
// after OnAudioPlaybackComplete() has been called on it.
class DirectAudioSource {
public:
using RenderingDelay = MediaPipelineBackend::AudioDecoder::RenderingDelay;
// Returns the desired playback buffer size in frames. This is the desired
// value for |num_frames| in FillAudioPlaybackFrames(); it affects the
// playback latency (larger value = higher latency). The backend may choose a
// different actual buffer size.
virtual int GetDesiredFillSize() = 0;
// Called when the source has been added to the backend, before any other
// calls are made. The |read_size| is the number of frames that will be
// requested for each call to FillAudioPlaybackFrames(). The
// |initial_rendering_delay| is the rendering delay estimate for the first
// call to FillAudioPlaybackFrames().
virtual void InitializeAudioPlayback(
int read_size,
RenderingDelay initial_rendering_delay) = 0;
// Called to read more audio data from the source. The source must fill in
// the |channels| with up to |num_frames| of audio. Note that only planar
// float format is supported. The |rendering_delay| indicates when the first
// frame of the filled data will be played out.
// Returns the number of frames filled.
virtual int FillAudioPlaybackFrames(int num_frames,
RenderingDelay rendering_delay,
const std::vector<float*>& channels) = 0;
// Called when an error occurs in audio playback. FillAudioPlaybackFrames()
// will not be called after an error occurs.
virtual void OnAudioPlaybackError() = 0;
// Called when audio playback is complete for this source. The source can only
// be safely deleted after OnAudioPlaybackComplete() has been called.
virtual void OnAudioPlaybackComplete() = 0;
protected:
// Protected non-virtual-delete: the backend never deletes the source; the
// implementer owns its lifetime (after OnAudioPlaybackComplete()).
virtual ~DirectAudioSource() = default;
};
} // namespace media
} // namespace chromecast
#endif // CHROMECAST_PUBLIC_MEDIA_DIRECT_AUDIO_SOURCE_H_
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
#define CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_BACKEND_H_ #define CHROMECAST_PUBLIC_MEDIA_MEDIA_PIPELINE_BACKEND_H_
#include <stdint.h> #include <stdint.h>
#include <string> #include <string>
#include "cast_key_status.h" #include "cast_key_status.h"
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment