Commit 1f45ded8 authored by Andrew MacPherson, committed by Commit Bot

Add support for AudioContextOptions sampleRate

Add an optional sampleRate member to AudioContextOptions. If provided,
the AudioContext runs at that sample rate; otherwise it runs at the
hardware rate.

Running the AudioContext at a lower sample rate allows a heavier graph
to run on low-end devices: if a given device can run X nodes at a
sample rate of 48 kHz, it can support roughly 2X nodes at 24 kHz or 4X
nodes at 12 kHz.
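
For illustration, opting into a reduced rate from script looks roughly
like this (a sketch only; 24000 is an arbitrary example value, and
rates outside the supported range are rejected with a DOMException, as
exercised by the layout test below):

  // Run the whole graph at 24 kHz instead of the hardware rate.
  const context = new AudioContext({sampleRate: 24000});
  console.log(context.sampleRate);  // 24000

  const osc = new OscillatorNode(context);
  osc.connect(context.destination);
  osc.start();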

Bug: 432248
Change-Id: I835f28b4625763bd6a6ddbee9b89636ef8a8a066
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1482957
Reviewed-by: Kent Tamura <tkent@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Reviewed-by: Hongchan Choi <hongchan@chromium.org>
Commit-Queue: Andrew MacPherson <andrew.macpherson@soundtrap.com>
Cr-Commit-Position: refs/heads/master@{#638498}
parent 895b3265
@@ -68,8 +68,14 @@ AudioContext* AudioContext::Create(Document& document,
         WebAudioLatencyHint(context_options->latencyHint().GetAsDouble());
   }
 
+  base::Optional<float> sample_rate;
+  if (context_options->hasSampleRate()) {
+    sample_rate = context_options->sampleRate();
+  }
+
   AudioContext* audio_context =
-      MakeGarbageCollected<AudioContext>(document, latency_hint);
+      MakeGarbageCollected<AudioContext>(document, latency_hint, sample_rate);
+  ++g_hardware_context_count;
   audio_context->UpdateStateIfNeeded();
 
   if (!audio_utilities::IsValidAudioBufferSampleRate(
@@ -97,7 +103,6 @@ AudioContext* AudioContext::Create(Document& document,
     audio_context->StartRendering();
     audio_context->SetContextState(kRunning);
   }
-  ++g_hardware_context_count;
 #if DEBUG_AUDIONODE_REFERENCES
   fprintf(stderr, "[%16p]: AudioContext::AudioContext(): %u #%u\n",
           audio_context, audio_context->context_id_, g_hardware_context_count);
@@ -117,10 +122,12 @@ AudioContext* AudioContext::Create(Document& document,
 }
 
 AudioContext::AudioContext(Document& document,
-                           const WebAudioLatencyHint& latency_hint)
+                           const WebAudioLatencyHint& latency_hint,
+                           base::Optional<float> sample_rate)
     : BaseAudioContext(&document, kRealtimeContext),
      context_id_(g_context_id++) {
-  destination_node_ = RealtimeAudioDestinationNode::Create(this, latency_hint);
+  destination_node_ =
+      RealtimeAudioDestinationNode::Create(this, latency_hint, sample_rate);
 
   switch (GetAutoplayPolicy()) {
     case AutoplayPolicy::Type::kNoUserGestureRequired:
...
@@ -37,7 +37,9 @@ class MODULES_EXPORT AudioContext : public BaseAudioContext {
                               const AudioContextOptions*,
                               ExceptionState&);
 
-  AudioContext(Document&, const WebAudioLatencyHint&);
+  AudioContext(Document&,
+               const WebAudioLatencyHint&,
+               base::Optional<float> sample_rate);
   ~AudioContext() override;
 
   void Trace(blink::Visitor*) override;
...
@@ -8,4 +8,5 @@ dictionary AudioContextOptions {
     // seconds, without taking into account double buffering (same as
     // AudioContext.baseLatency).
     (AudioContextLatencyCategory or double) latencyHint = "interactive";
+    float sampleRate;
 };
@@ -39,17 +39,20 @@
 namespace blink {
 
 scoped_refptr<RealtimeAudioDestinationHandler>
-RealtimeAudioDestinationHandler::Create(
-    AudioNode& node,
-    const WebAudioLatencyHint& latency_hint) {
+RealtimeAudioDestinationHandler::Create(AudioNode& node,
+                                        const WebAudioLatencyHint& latency_hint,
+                                        base::Optional<float> sample_rate) {
   return base::AdoptRef(
-      new RealtimeAudioDestinationHandler(node, latency_hint));
+      new RealtimeAudioDestinationHandler(node, latency_hint, sample_rate));
 }
 
 RealtimeAudioDestinationHandler::RealtimeAudioDestinationHandler(
     AudioNode& node,
-    const WebAudioLatencyHint& latency_hint)
-    : AudioDestinationHandler(node), latency_hint_(latency_hint) {
+    const WebAudioLatencyHint& latency_hint,
+    base::Optional<float> sample_rate)
+    : AudioDestinationHandler(node),
+      latency_hint_(latency_hint),
+      sample_rate_(sample_rate) {
   // Node-specific default channel count and mixing rules.
   channel_count_ = 2;
   SetInternalChannelCountMode(kExplicit);
@@ -235,8 +238,8 @@ int RealtimeAudioDestinationHandler::GetFramesPerBuffer() const {
 }
 
 void RealtimeAudioDestinationHandler::CreatePlatformDestination() {
-  platform_destination_ =
-      AudioDestination::Create(*this, ChannelCount(), latency_hint_);
+  platform_destination_ = AudioDestination::Create(*this, ChannelCount(),
+                                                   latency_hint_, sample_rate_);
 }
 
 void RealtimeAudioDestinationHandler::StartPlatformDestination() {
@@ -267,16 +270,19 @@ void RealtimeAudioDestinationHandler::StopPlatformDestination() {
 
 RealtimeAudioDestinationNode::RealtimeAudioDestinationNode(
     AudioContext& context,
-    const WebAudioLatencyHint& latency_hint)
+    const WebAudioLatencyHint& latency_hint,
+    base::Optional<float> sample_rate)
     : AudioDestinationNode(context) {
-  SetHandler(RealtimeAudioDestinationHandler::Create(*this, latency_hint));
+  SetHandler(RealtimeAudioDestinationHandler::Create(*this, latency_hint,
+                                                     sample_rate));
 }
 
 RealtimeAudioDestinationNode* RealtimeAudioDestinationNode::Create(
     AudioContext* context,
-    const WebAudioLatencyHint& latency_hint) {
-  return MakeGarbageCollected<RealtimeAudioDestinationNode>(*context,
-                                                            latency_hint);
+    const WebAudioLatencyHint& latency_hint,
+    base::Optional<float> sample_rate) {
+  return MakeGarbageCollected<RealtimeAudioDestinationNode>(
+      *context, latency_hint, sample_rate);
 }
 
 }  // namespace blink
@@ -43,7 +43,8 @@ class RealtimeAudioDestinationHandler final : public AudioDestinationHandler,
  public:
   static scoped_refptr<RealtimeAudioDestinationHandler> Create(
       AudioNode&,
-      const WebAudioLatencyHint&);
+      const WebAudioLatencyHint&,
+      base::Optional<float> sample_rate);
   ~RealtimeAudioDestinationHandler() override;
 
   // For AudioHandler.
@@ -80,7 +81,8 @@ class RealtimeAudioDestinationHandler final : public AudioDestinationHandler,
  private:
   explicit RealtimeAudioDestinationHandler(AudioNode&,
-                                           const WebAudioLatencyHint&);
+                                           const WebAudioLatencyHint&,
+                                           base::Optional<float> sample_rate);
 
   void CreatePlatformDestination();
   void StartPlatformDestination();
@@ -90,17 +92,22 @@ class RealtimeAudioDestinationHandler final : public AudioDestinationHandler,
   // Holds the audio device thread that runs the real time audio context.
   scoped_refptr<AudioDestination> platform_destination_;
+
+  base::Optional<float> sample_rate_;
 };
 
 // -----------------------------------------------------------------------------
 
 class RealtimeAudioDestinationNode final : public AudioDestinationNode {
  public:
-  static RealtimeAudioDestinationNode* Create(AudioContext*,
-                                              const WebAudioLatencyHint&);
+  static RealtimeAudioDestinationNode* Create(
+      AudioContext*,
+      const WebAudioLatencyHint&,
+      base::Optional<float> sample_rate);
 
   explicit RealtimeAudioDestinationNode(AudioContext&,
-                                        const WebAudioLatencyHint&);
+                                        const WebAudioLatencyHint&,
+                                        base::Optional<float> sample_rate);
 };
 
 }  // namespace blink
...
@@ -376,6 +376,8 @@ jumbo_component("platform") {
     "audio/iir_filter.h",
     "audio/mac/fft_frame_mac.cc",
     "audio/mac/vector_math_mac.h",
+    "audio/media_multi_channel_resampler.cc",
+    "audio/media_multi_channel_resampler.h",
     "audio/multi_channel_resampler.cc",
     "audio/multi_channel_resampler.h",
     "audio/panner.cc",
@@ -1651,6 +1653,7 @@ jumbo_source_set("blink_platform_unittests_sources") {
     "animation/compositor_float_animation_curve_test.cc",
     "animation/compositor_keyframe_model_test.cc",
     "animation/timing_function_test.cc",
+    "audio/audio_destination_test.cc",
     "audio/push_pull_fifo_multithread_test.cc",
     "audio/push_pull_fifo_test.cc",
     "audio/vector_math_test.cc",
...
@@ -6,6 +6,8 @@ include_rules = [
   "+third_party/blink/renderer/platform/audio",
 
   # Dependencies.
+  "+media/base/audio_bus.h",
+  "+media/base/multi_channel_resampler.h",
   "+third_party/blink/renderer/platform/cpu/mips/common_macros_msa.h",
   "+third_party/blink/renderer/platform/cross_thread_functional.h",
   "+third_party/blink/renderer/platform/geometry/float_point_3d.h",
...
@@ -32,6 +32,7 @@
 #include <memory>
 #include <utility>
 
+#include "media/base/audio_bus.h"
 #include "third_party/blink/public/platform/platform.h"
 #include "third_party/blink/public/platform/web_audio_latency_hint.h"
 #include "third_party/blink/renderer/platform/audio/audio_utilities.h"
@@ -55,14 +56,16 @@ const size_t kFIFOSize = 96 * 128;
 scoped_refptr<AudioDestination> AudioDestination::Create(
     AudioIOCallback& callback,
     unsigned number_of_output_channels,
-    const WebAudioLatencyHint& latency_hint) {
-  return base::AdoptRef(
-      new AudioDestination(callback, number_of_output_channels, latency_hint));
+    const WebAudioLatencyHint& latency_hint,
+    base::Optional<float> context_sample_rate) {
+  return base::AdoptRef(new AudioDestination(
+      callback, number_of_output_channels, latency_hint, context_sample_rate));
 }
 
 AudioDestination::AudioDestination(AudioIOCallback& callback,
                                    unsigned number_of_output_channels,
-                                   const WebAudioLatencyHint& latency_hint)
+                                   const WebAudioLatencyHint& latency_hint,
+                                   base::Optional<float> context_sample_rate)
     : number_of_output_channels_(number_of_output_channels),
       play_state_(PlayState::kStopped),
       fifo_(
@@ -96,6 +99,26 @@ AudioDestination::AudioDestination(AudioIOCallback& callback,
   if (!CheckBufferSize()) {
     NOTREACHED();
   }
+
+  if (context_sample_rate.has_value() &&
+      context_sample_rate.value() != web_audio_device_->SampleRate()) {
+    double scale_factor =
+        context_sample_rate.value() / web_audio_device_->SampleRate();
+    resampler_.reset(new MediaMultiChannelResampler(
+        MaxChannelCount(), scale_factor, audio_utilities::kRenderQuantumFrames,
+        ConvertToBaseCallback(
+            CrossThreadBind(&AudioDestination::ProvideResamplerInput,
+                            CrossThreadUnretained(this)))));
+    resampler_bus_ =
+        media::AudioBus::CreateWrapper(render_bus_->NumberOfChannels());
+    for (unsigned int i = 0; i < render_bus_->NumberOfChannels(); ++i) {
+      resampler_bus_->SetChannelData(i, render_bus_->Channel(i)->MutableData());
+    }
+    resampler_bus_->set_frames(render_bus_->length());
+    context_sample_rate_ = context_sample_rate.value();
+  } else {
+    context_sample_rate_ = web_audio_device_->SampleRate();
+  }
 }
 
 AudioDestination::~AudioDestination() {
@@ -161,17 +184,15 @@ void AudioDestination::RequestRender(size_t frames_requested,
                           delay_timestamp);
   frames_elapsed_ -= std::min(frames_elapsed_, prior_frames_skipped);
-  AudioIOPosition output_position;
-  output_position.position =
+  output_position_.position =
       frames_elapsed_ / static_cast<double>(web_audio_device_->SampleRate()) -
       delay;
-  output_position.timestamp = delay_timestamp;
+  output_position_.timestamp = delay_timestamp;
 
   base::TimeTicks callback_request = base::TimeTicks::Now();
 
-  AudioIOCallbackMetric metric;
-  metric.callback_interval =
+  metric_.callback_interval =
       (callback_request - previous_callback_request_).InSecondsF();
-  metric.render_duration = previous_render_duration_.InSecondsF();
+  metric_.render_duration = previous_render_duration_.InSecondsF();
 
   for (size_t pushed_frames = 0; pushed_frames < frames_to_render;
        pushed_frames += audio_utilities::kRenderQuantumFrames) {
@@ -180,18 +201,23 @@ void AudioDestination::RequestRender(size_t frames_requested,
     // using the elapsed time from the moment it was initially obtained.
     if (callback_buffer_size_ > audio_utilities::kRenderQuantumFrames * 2) {
       double delta = (base::TimeTicks::Now() - callback_request).InSecondsF();
-      output_position.position += delta;
-      output_position.timestamp += delta;
+      output_position_.position += delta;
+      output_position_.timestamp += delta;
     }
 
     // Some implementations give only rough estimation of |delay| so
     // we might have negative estimation |outputPosition| value.
-    if (output_position.position < 0.0)
-      output_position.position = 0.0;
+    if (output_position_.position < 0.0)
+      output_position_.position = 0.0;
 
-    // Process WebAudio graph and push the rendered output to FIFO.
-    callback_.Render(render_bus_.get(), audio_utilities::kRenderQuantumFrames,
-                     output_position, metric);
+    if (resampler_) {
+      resampler_->Resample(audio_utilities::kRenderQuantumFrames,
+                           resampler_bus_.get());
+    } else {
+      // Process WebAudio graph and push the rendered output to FIFO.
+      callback_.Render(render_bus_.get(), audio_utilities::kRenderQuantumFrames,
+                       output_position_, metric_);
+    }
 
     fifo_->Push(render_bus_.get());
   }
@@ -306,4 +332,10 @@ bool AudioDestination::CheckBufferSize() {
       kFIFOSize);
   return is_buffer_size_valid;
 }
+
+void AudioDestination::ProvideResamplerInput(int resampler_frame_delay,
+                                             AudioBus* dest) {
+  callback_.Render(dest, audio_utilities::kRenderQuantumFrames,
+                   output_position_, metric_);
+}
 
 }  // namespace blink
@@ -37,6 +37,7 @@
 #include "third_party/blink/public/platform/web_vector.h"
 #include "third_party/blink/renderer/platform/audio/audio_bus.h"
 #include "third_party/blink/renderer/platform/audio/audio_io_callback.h"
+#include "third_party/blink/renderer/platform/audio/media_multi_channel_resampler.h"
 #include "third_party/blink/renderer/platform/scheduler/public/thread.h"
 #include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
 #include "third_party/blink/renderer/platform/wtf/thread_safe_ref_counted.h"
@@ -64,13 +65,15 @@ class PLATFORM_EXPORT AudioDestination
  public:
   AudioDestination(AudioIOCallback&,
                    unsigned number_of_output_channels,
-                   const WebAudioLatencyHint&);
+                   const WebAudioLatencyHint&,
+                   base::Optional<float> context_sample_rate);
   ~AudioDestination() override;
 
   static scoped_refptr<AudioDestination> Create(
       AudioIOCallback&,
       unsigned number_of_output_channels,
-      const WebAudioLatencyHint&);
+      const WebAudioLatencyHint&,
+      base::Optional<float> context_sample_rate);
 
   // The actual render function (WebAudioDevice::RenderCallback) isochronously
   // invoked by the media renderer. This is never called after Stop() is called.
@@ -102,8 +105,8 @@ class PLATFORM_EXPORT AudioDestination
   uint32_t CallbackBufferSize() const;
   bool IsPlaying();
 
-  // TODO(hongchan): this should not be called by the rendering thread.
-  double SampleRate() const { return web_audio_device_->SampleRate(); }
+  // This is the context sample rate, not the device one.
+  double SampleRate() const { return context_sample_rate_; }
 
   // Returns the audio buffer size in frames used by the underlying audio
   // hardware.
@@ -114,6 +117,9 @@ class PLATFORM_EXPORT AudioDestination
   static uint32_t MaxChannelCount();
 
  private:
+  // Provide input to the resampler (if used).
+  void ProvideResamplerInput(int resampler_frame_delay, AudioBus* dest);
+
   enum class PlayState { kStopped, kPlaying, kPaused };
 
   // Check if the buffer size chosen by the WebAudioDevice is too large.
@@ -155,6 +161,18 @@ class PLATFORM_EXPORT AudioDestination
   // Accessed by rendering thread.
   size_t frames_elapsed_;
 
+  // The sample rate used for rendering the Web Audio graph.
+  float context_sample_rate_;
+
+  // Used for resampling if the Web Audio sample rate differs from the platform
+  // one.
+  std::unique_ptr<MediaMultiChannelResampler> resampler_;
+  std::unique_ptr<media::AudioBus> resampler_bus_;
+
+  // Required for RequestRender and also in the resampling callback (if used).
+  AudioIOPosition output_position_;
+  AudioIOCallbackMetric metric_;
+
   DISALLOW_COPY_AND_ASSIGN(AudioDestination);
 };
...
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/platform/audio/audio_destination.h"

#include <memory>
#include <vector>

#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_audio_device.h"
#include "third_party/blink/public/platform/web_audio_latency_hint.h"
#include "third_party/blink/renderer/platform/audio/audio_io_callback.h"
#include "third_party/blink/renderer/platform/audio/audio_utilities.h"
#include "third_party/blink/renderer/platform/testing/testing_platform_support.h"

namespace blink {
namespace {

class MockWebAudioDevice : public WebAudioDevice {
 public:
  explicit MockWebAudioDevice(double sample_rate, int frames_per_buffer)
      : sample_rate_(sample_rate), frames_per_buffer_(frames_per_buffer) {}

  void Start() override {}
  void Stop() override {}
  void Pause() override {}
  void Resume() override {}
  double SampleRate() override { return sample_rate_; }
  int FramesPerBuffer() override { return frames_per_buffer_; }

 private:
  double sample_rate_;
  int frames_per_buffer_;
};

class TestPlatform : public TestingPlatformSupport {
 public:
  std::unique_ptr<WebAudioDevice> CreateAudioDevice(
      unsigned number_of_input_channels,
      unsigned number_of_channels,
      const WebAudioLatencyHint& latency_hint,
      WebAudioDevice::RenderCallback*,
      const WebString& device_id) override {
    return std::make_unique<MockWebAudioDevice>(AudioHardwareSampleRate(),
                                                AudioHardwareBufferSize());
  }

  double AudioHardwareSampleRate() override { return 44100; }
  size_t AudioHardwareBufferSize() override { return 512; }
  unsigned AudioHardwareOutputChannels() override { return 2; }
};

class AudioCallback : public blink::AudioIOCallback {
 public:
  void Render(AudioBus* destination_bus,
              uint32_t frames_to_process,
              const AudioIOPosition& output_position,
              const AudioIOCallbackMetric& metric) override {
    frames_processed_ += frames_to_process;
  }

  AudioCallback() : frames_processed_(0) {}
  int frames_processed_;
};

void CountWASamplesProcessedForRate(base::Optional<float> sample_rate) {
  WebAudioLatencyHint latency_hint(WebAudioLatencyHint::kCategoryInteractive);
  AudioCallback callback;
  const int channel_count = Platform::Current()->AudioHardwareOutputChannels();
  const size_t request_frames = Platform::Current()->AudioHardwareBufferSize();

  scoped_refptr<AudioDestination> destination = AudioDestination::Create(
      callback, channel_count, latency_hint, sample_rate);

  std::vector<float> channels[channel_count];
  WebVector<float*> dest_data(static_cast<size_t>(channel_count));
  for (int i = 0; i < channel_count; ++i) {
    channels[i].resize(request_frames);
    dest_data[i] = channels[i].data();
  }

  destination->Render(dest_data, request_frames, 0, 0, 0);

  int exact_frames_required =
      std::ceil(request_frames * destination->SampleRate() /
                Platform::Current()->AudioHardwareSampleRate());
  int expected_frames_processed =
      std::ceil(exact_frames_required /
                static_cast<double>(audio_utilities::kRenderQuantumFrames)) *
      audio_utilities::kRenderQuantumFrames;

  EXPECT_EQ(expected_frames_processed, callback.frames_processed_);
}

TEST(AudioDestinationTest, ResamplingTest) {
  ScopedTestingPlatformSupport<TestPlatform> platform;

  CountWASamplesProcessedForRate(base::Optional<float>());
  CountWASamplesProcessedForRate(8000);
  CountWASamplesProcessedForRate(24000);
  CountWASamplesProcessedForRate(44100);
  CountWASamplesProcessedForRate(48000);
  CountWASamplesProcessedForRate(384000);
}

}  // namespace
}  // namespace blink
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/platform/audio/media_multi_channel_resampler.h"

#include <memory>

#include "base/bind.h"
#include "media/base/audio_bus.h"
#include "third_party/blink/renderer/platform/audio/audio_bus.h"

namespace blink {

MediaMultiChannelResampler::MediaMultiChannelResampler(
    int channels,
    double io_sample_rate_ratio,
    size_t request_frames,
    const ReadCB& read_cb)
    : read_cb_(read_cb) {
  resampler_.reset(new media::MultiChannelResampler(
      channels, io_sample_rate_ratio, request_frames,
      base::BindRepeating(&MediaMultiChannelResampler::ProvideResamplerInput,
                          base::Unretained(this))));
}

void MediaMultiChannelResampler::Resample(int frames,
                                          media::AudioBus* audio_bus) {
  resampler_->Resample(audio_bus->frames(), audio_bus);
}

void MediaMultiChannelResampler::ProvideResamplerInput(
    int resampler_frame_delay,
    media::AudioBus* dest) {
  // Create a blink::AudioBus wrapper around the memory provided by the
  // media::AudioBus.
  scoped_refptr<AudioBus> bus =
      AudioBus::Create(dest->channels(), dest->frames(), false);
  for (int i = 0; i < dest->channels(); ++i) {
    bus->SetChannelMemory(i, dest->channel(i), dest->frames());
  }
  read_cb_.Run(resampler_frame_delay, bus.get());
}

}  // namespace blink
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_AUDIO_MEDIA_MULTI_CHANNEL_RESAMPLER_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_AUDIO_MEDIA_MULTI_CHANNEL_RESAMPLER_H_

#include <memory>

#include "base/callback.h"
#include "base/macros.h"
#include "media/base/multi_channel_resampler.h"
#include "third_party/blink/renderer/platform/platform_export.h"
#include "third_party/blink/renderer/platform/wtf/allocator.h"

namespace media {
class AudioBus;
}  // namespace media

namespace blink {

class AudioBus;

// This is a simple wrapper around the MultiChannelResampler provided by the
// media layer.
class PLATFORM_EXPORT MediaMultiChannelResampler {
  USING_FAST_MALLOC(MediaMultiChannelResampler);

  // Callback type for providing more data into the resampler. Expects AudioBus
  // to be completely filled with data upon return; zero padded if not enough
  // frames are available to satisfy the request. |frame_delay| is the number
  // of output frames already processed and can be used to estimate delay.
  typedef base::RepeatingCallback<void(int frame_delay, AudioBus* audio_bus)>
      ReadCB;

 public:
  // Constructs a MultiChannelResampler with the specified |read_cb|, which is
  // used to acquire audio data for resampling. |io_sample_rate_ratio| is the
  // ratio of input / output sample rates. |request_frames| is the size in
  // frames of the AudioBus to be filled by |read_cb|.
  MediaMultiChannelResampler(int channels,
                             double io_sample_rate_ratio,
                             size_t request_frames,
                             const ReadCB& read_cb);

  // Resamples |frames| of data from |read_cb_| into AudioBus.
  void Resample(int frames, media::AudioBus* audio_bus);

 private:
  // Wrapper method used to provide input to the media::MultiChannelResampler
  // with a media::AudioBus rather than a blink::AudioBus.
  void ProvideResamplerInput(int resampler_frame_delay, media::AudioBus* dest);

  // The resampler being wrapped by this class.
  std::unique_ptr<media::MultiChannelResampler> resampler_;

  // The callback using a blink::AudioBus that will be called by
  // ProvideResamplerInput().
  ReadCB read_cb_;

  DISALLOW_COPY_AND_ASSIGN(MediaMultiChannelResampler);
};

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_PLATFORM_AUDIO_MEDIA_MULTI_CHANNEL_RESAMPLER_H_
@@ -156,6 +156,45 @@
         });
       });
 
+      audit.define(
+          {
+            label: 'test-audiocontextoptions-sampleRate',
+            description:
+                'Test creating contexts with non-default sampleRate values.'
+          },
+          function(task, should) {
+            // A sampleRate of 1 is unlikely to be supported on any browser,
+            // test that this rate is rejected.
+            should(
+                () => {
+                  context = new AudioContext({sampleRate: 1})
+                },
+                'context = new AudioContext({sampleRate: 1})')
+                .throw(DOMException);
+
+            // A sampleRate of 1,000,000 is unlikely to be supported on any
+            // browser, test that this rate is also rejected.
+            should(
+                () => {
+                  context = new AudioContext({sampleRate: 1000000})
+                },
+                'context = new AudioContext({sampleRate: 1000000})')
+                .throw(DOMException);
+
+            should(
+                () => {
+                  context = new AudioContext({sampleRate: 24000})
+                },
+                'context = new AudioContext({sampleRate: 24000})')
+                .notThrow();
+            should(
+                context.sampleRate, 'sampleRate inrange')
+                .beEqualTo(24000);
+
+            context.close();
+            task.done();
+          });
+
       audit.run();
     </script>
   </body>
...