Commit 9a421fa1 authored by Sam Zackrisson, committed by Commit Bot

Remove redundant APM render buffer

This CL removes the render_fifo_ buffer from MediaStreamAudioProcessor
and DCHECKs that incoming render audio buffers are 10 ms long.

MediaStreamAudioProcessor currently buffers audio provided to
OnPlayoutData in order to feed the APM frames in 10 ms chunks. However, the
caller of OnPlayoutData already DCHECKs that the buffers it delivers are
10 ms long, so the FIFO is redundant.

This CL also modifies the unit tests, which use the same buffer-size
configuration for render input as for capture input, even though only the
capture path needs to support buffer sizes other than 10 ms.
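
A minimal sketch (not part of this CL) of the 10 ms invariant that the new
checks rely on: a 10 ms buffer holds sample_rate / 100 frames, and the render
path handles only mono or stereo. The helper names below are illustrative
assumptions, not code from this change.

  // Illustrative sketch only; names are hypothetical, not from this CL.
  #include <cassert>

  int FramesPer10Ms(int sample_rate) {
    // 10 ms of audio at |sample_rate| Hz: 480 frames at 48000 Hz,
    // 441 at 44100 Hz, 80 at 8000 Hz.
    return sample_rate / 100;
  }

  void CheckRenderBuffer(int frames, int sample_rate, int channels) {
    assert(frames == FramesPer10Ms(sample_rate));  // Caller delivers 10 ms chunks.
    assert(channels >= 1 && channels <= 2);        // Mono or stereo only.
  }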

Bug: 804277

Change-Id: Ia20cf0e0810c59570305f1ca295016e0ca11057e
Reviewed-on: https://chromium-review.googlesource.com/875862
Commit-Queue: Sam Zackrisson <saza@chromium.org>
Reviewed-by: Olga Sharonova <olka@chromium.org>
Reviewed-by: Henrik Grunell <grunell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#533990}
parent 875789ea
@@ -484,6 +484,17 @@ void MediaStreamAudioProcessor::OnPlayoutData(media::AudioBus* audio_bus,
#else
DCHECK(!audio_processing_->echo_control_mobile()->is_enabled());
#endif
DCHECK_GE(audio_bus->channels(), 1);
DCHECK_LE(audio_bus->channels(), 2);
int frames_per_10_ms = sample_rate / 100;
if (audio_bus->frames() != frames_per_10_ms) {
if (unsupported_buffer_size_log_count_ < 10) {
LOG(ERROR) << "MSAP::OnPlayoutData: Unsupported audio buffer size "
<< audio_bus->frames() << ", expected " << frames_per_10_ms;
++unsupported_buffer_size_log_count_;
}
return;
}
TRACE_EVENT1("audio", "MediaStreamAudioProcessor::OnPlayoutData",
"delay (ms)", audio_delay_milliseconds);
@@ -491,20 +502,20 @@ void MediaStreamAudioProcessor::OnPlayoutData(media::AudioBus* audio_bus,
std::numeric_limits<base::subtle::Atomic32>::max());
base::subtle::Release_Store(&render_delay_ms_, audio_delay_milliseconds);
InitializeRenderFifoIfNeeded(sample_rate, audio_bus->channels(),
audio_bus->frames());
render_fifo_->Push(
*audio_bus, base::TimeDelta::FromMilliseconds(audio_delay_milliseconds));
MediaStreamAudioBus* analysis_bus;
base::TimeDelta audio_delay;
while (render_fifo_->Consume(&analysis_bus, &audio_delay)) {
// TODO(ajm): Should AnalyzeReverseStream() account for the |audio_delay|?
audio_processing_->AnalyzeReverseStream(
analysis_bus->channel_ptrs(),
analysis_bus->bus()->frames(),
sample_rate,
ChannelsToLayout(audio_bus->channels()));
std::vector<const float*> channel_ptrs(audio_bus->channels());
for (int i = 0; i < audio_bus->channels(); ++i)
channel_ptrs[i] = audio_bus->channel(i);
// TODO(ajm): Should AnalyzeReverseStream() account for the
// |audio_delay_milliseconds|?
const int apm_error = audio_processing_->AnalyzeReverseStream(
channel_ptrs.data(), audio_bus->frames(), sample_rate,
ChannelsToLayout(audio_bus->channels()));
if (apm_error != webrtc::AudioProcessing::kNoError &&
apm_playout_error_code_log_count_ < 10) {
LOG(ERROR) << "MSAP::OnPlayoutData: AnalyzeReverseStream error="
<< apm_error;
++apm_playout_error_code_log_count_;
}
}
@@ -513,13 +524,11 @@ void MediaStreamAudioProcessor::OnPlayoutDataSourceChanged() {
// There is no need to hold a lock here since the caller guarantees that
// there is no more OnPlayoutData() callback on the render thread.
render_thread_checker_.DetachFromThread();
render_fifo_.reset();
}
void MediaStreamAudioProcessor::OnRenderThreadChanged() {
render_thread_checker_.DetachFromThread();
DCHECK(render_thread_checker_.CalledOnValidThread());
render_fifo_->ReattachThreadChecker();
}
void MediaStreamAudioProcessor::GetStats(AudioProcessorStats* stats) {
@@ -738,33 +747,6 @@ void MediaStreamAudioProcessor::InitializeCaptureFifo(
}
}
void MediaStreamAudioProcessor::InitializeRenderFifoIfNeeded(
int sample_rate, int number_of_channels, int frames_per_buffer) {
DCHECK(render_thread_checker_.CalledOnValidThread());
if (render_fifo_.get() &&
render_format_.sample_rate() == sample_rate &&
render_format_.channels() == number_of_channels &&
render_format_.frames_per_buffer() == frames_per_buffer) {
// Do nothing if the |render_fifo_| has been setup properly.
return;
}
render_format_ = media::AudioParameters(
media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
media::GuessChannelLayout(number_of_channels),
sample_rate,
16,
frames_per_buffer);
const int analysis_frames = sample_rate / 100; // 10 ms chunks.
render_fifo_.reset(
new MediaStreamAudioFifo(number_of_channels,
number_of_channels,
frames_per_buffer,
analysis_frames,
sample_rate));
}
int MediaStreamAudioProcessor::ProcessData(const float* const* process_ptrs,
int process_frames,
base::TimeDelta capture_delay,
......
@@ -186,16 +186,11 @@ class CONTENT_EXPORT MediaStreamAudioProcessor
// Receives processing output.
std::unique_ptr<MediaStreamAudioBus> output_bus_;
// FIFO to provide 10 ms render chunks when the AEC is enabled.
std::unique_ptr<MediaStreamAudioFifo> render_fifo_;
// These are mutated on the main render thread in OnCaptureFormatChanged().
// The caller guarantees this does not run concurrently with accesses on the
// capture audio thread.
media::AudioParameters input_format_;
media::AudioParameters output_format_;
// Only used on the render audio thread.
media::AudioParameters render_format_;
// Raw pointer to the WebRtcPlayoutDataSource, which is valid for the
// lifetime of RenderThread.
@@ -224,6 +219,10 @@ class CONTENT_EXPORT MediaStreamAudioProcessor
// Flag to avoid executing Stop() more than once.
bool stopped_;
// Counters to avoid excessively logging errors in OnPlayoutData.
size_t unsupported_buffer_size_log_count_ = 0;
size_t apm_playout_error_code_log_count_ = 0;
// Object for logging UMA stats for echo information when the AEC is enabled.
// Accessed on the main render thread.
std::unique_ptr<EchoInformation> echo_information_;
......
@@ -115,7 +115,7 @@ class MediaStreamAudioProcessorTest : public ::testing::Test {
media::CHANNEL_LAYOUT_STEREO,
48000,
16,
512) {}
480) {}
protected:
// Helper method to save duplicated code.
@@ -304,8 +304,7 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_TestAllSampleRates) {
static const int kSupportedSampleRates[] =
{ 8000, 16000, 22050, 32000, 44100, 48000 };
for (size_t i = 0; i < arraysize(kSupportedSampleRates); ++i) {
int buffer_size = (kSupportedSampleRates[i] / 100) < 128 ?
kSupportedSampleRates[i] / 100 : 128;
int buffer_size = kSupportedSampleRates[i] / 100;
media::AudioParameters params(
media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
media::CHANNEL_LAYOUT_STEREO, kSupportedSampleRates[i], 16,
@@ -456,7 +455,7 @@ TEST_F(MediaStreamAudioProcessorTest, MAYBE_TestWithKeyboardMicChannel) {
media::AudioParameters params(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
media::CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC,
48000, 16, 512);
48000, 16, 480);
audio_processor->OnCaptureFormatChanged(params);
ProcessDataAndVerifyFormat(audio_processor.get(),
......
@@ -67,13 +67,14 @@ void WebRtcAudioDeviceImpl::RenderData(media::AudioBus* audio_bus,
output_delay_ms_ = audio_delay_milliseconds;
}
render_buffer_.resize(audio_bus->frames() * audio_bus->channels());
int frames_per_10_ms = sample_rate / 100;
int bytes_per_sample = sizeof(render_buffer_[0]);
// Client should always ask for 10ms.
const int frames_per_10_ms = sample_rate / 100;
DCHECK_EQ(audio_bus->frames(), frames_per_10_ms);
DCHECK_GE(audio_bus->channels(), 1);
DCHECK_LE(audio_bus->channels(), 2);
// Get 10ms audio and copy result to temporary byte buffer.
render_buffer_.resize(audio_bus->frames() * audio_bus->channels());
const int bytes_per_sample = sizeof(render_buffer_[0]);
static const int kBitsPerByte = 8;
int64_t elapsed_time_ms = -1;
int64_t ntp_time_ms = -1;
......
@@ -50,6 +50,7 @@ class WebRtcAudioRenderer;
class WebRtcAudioRendererSource {
public:
// Callback to get the rendered data.
// |audio_bus| must have buffer size |sample_rate/100| and 1-2 channels.
virtual void RenderData(media::AudioBus* audio_bus,
int sample_rate,
int audio_delay_milliseconds,
@@ -77,6 +78,7 @@ class WebRtcPlayoutDataSource {
public:
// Callback to get the playout data.
// Called on the audio render thread.
// |audio_bus| must have buffer size |sample_rate/100| and 1-2 channels.
virtual void OnPlayoutData(media::AudioBus* audio_bus,
int sample_rate,
int audio_delay_milliseconds) = 0;
......
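
For reference, a hedged sketch of how a caller of RenderData()/OnPlayoutData()
might size the bus it passes in so that the documented contract and the new
DCHECKs hold. It assumes the media::AudioBus::Create(channels, frames) factory;
the helper name is illustrative and not part of this CL.

  // Illustrative only: sizing a playout bus as |sample_rate / 100| frames.
  #include <memory>
  #include "media/base/audio_bus.h"

  std::unique_ptr<media::AudioBus> MakePlayoutBus(int sample_rate, int channels) {
    const int frames_per_10_ms = sample_rate / 100;  // e.g. 480 at 48 kHz.
    // 1-2 channels and a 10 ms buffer satisfy the documented contract.
    return media::AudioBus::Create(channels, frames_per_10_ms);
  }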