Commit d8b32f76 authored by Hongchan Choi, committed by Commit Bot

Add currentFrame in AudioWorkletGlobalScope

This CL adds the |currentFrame| property to AudioWorkletGlobalScope per
the spec change: https://github.com/WebAudio/web-audio-api/pull/1493.

1. Previously the time stamp (currentTime) was updated when the first
   worklet processor was called, which was inconsistent with how the
   other nodes update it. Now the frame number is advanced in one place
   per render quantum, so worklet processors get the same treatment as
   the other nodes (see the sketch below).

2. To streamline this, a WorkerThread reference was added to
   BaseAudioContext. Due to the GC rules, BaseAudioContext cannot hold a
   reference to the WorkletGlobalScope directly, so it always derives the
   global scope from the worker thread.
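
A minimal usage sketch of the new global (not part of this CL; the
processor name 'frame-logger' and the logging interval are made up for
illustration):

  class FrameLoggerProcessor extends AudioWorkletProcessor {
    process(inputs, outputs, parameters) {
      // |currentFrame|, |currentTime|, and |sampleRate| are globals in
      // AudioWorkletGlobalScope. With this CL, currentTime is derived from
      // currentFrame / sampleRate, and both advance once per render quantum.
      if (currentFrame % 12800 === 0) {  // roughly every 100 render quanta
        this.port.postMessage({frame: currentFrame, time: currentTime});
      }
      return true;
    }
  }

  registerProcessor('frame-logger', FrameLoggerProcessor);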

Bug: 814794
Test: http/tests/webaudio/audio-worklet/timing-info.html
Change-Id: I4b313377d80f8678c473cf788211e373fd1644cb
Reviewed-on: https://chromium-review.googlesource.com/935157
Commit-Queue: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#539300}
parent 384c9d45
/**
 * @class TimingInfoProcessor
 * @extends AudioWorkletProcessor
 *
 * This processor class is for testing the timing information in
 * AudioWorkletGlobalScope (AWGS).
 */
class TimingInfoProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.port.onmessage = this.echoMessage.bind(this);
  }

  echoMessage(event) {
    this.port.postMessage({
      currentTime: currentTime,
      currentFrame: currentFrame
    });
  }

  process() {
    return true;
  }
}

registerProcessor('timing-info-processor', TimingInfoProcessor);
<!DOCTYPE html>
<html>
  <head>
    <title>
      Test currentTime and currentFrame in AudioWorkletGlobalScope
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../../../webaudio-resources/audit.js"></script>
    <script src="audio-worklet-common.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      // TODO(hongchan): remove this assertion when AudioWorklet is shipped.
      assertAudioWorklet();

      let audit = Audit.createTaskRunner();

      let sampleRate = 48000;
      let renderLength = 512;
      let context = new OfflineAudioContext(1, renderLength, sampleRate);

      audit.define(
          'Check the timing information from AudioWorkletProcessor',
          (task, should) => {
            let portWorkletNode =
                new AudioWorkletNode(context, 'timing-info-processor');
            portWorkletNode.connect(context.destination);

            // Suspend at render quantum boundaries and check the timing
            // information between the main thread and the rendering thread.
            [0, 128, 256, 384].map((suspendFrame) => {
              context.suspend(suspendFrame / sampleRate).then(() => {
                portWorkletNode.port.onmessage = (event) => {
                  should(event.data.currentFrame,
                         'currentFrame from the processor at ' + suspendFrame)
                      .beEqualTo(suspendFrame);
                  should(event.data.currentTime,
                         'currentTime from the processor at '
                             + context.currentTime)
                      .beEqualTo(context.currentTime);
                  context.resume();
                };

                portWorkletNode.port.postMessage('query-timing-info');
              });
            });

            context.startRendering().then(() => {
              task.done();
            });
          });

      context.audioWorklet.addModule('timing-info-processor.js').then(() => {
        audit.run();
      });
    </script>
  </body>
</html>
@@ -110,6 +110,8 @@ void AudioDestinationHandler::Render(AudioBus* source_bus,
   // Advance current sample-frame.
   size_t new_sample_frame = current_sample_frame_ + number_of_frames;
   ReleaseStore(&current_sample_frame_, new_sample_frame);
+
+  Context()->UpdateWorkletGlobalScopeOnRenderingThread();
 }
 
 // ----------------------------------------------------------------
......
@@ -45,7 +45,7 @@ void AudioWorklet::NotifyGlobalScopeIsUpdated() {
 WebThread* AudioWorklet::GetBackingThread() {
   DCHECK(IsMainThread());
   DCHECK(GetMessagingProxy());
-  return GetMessagingProxy()->GetWorkletBackingThread();
+  return GetMessagingProxy()->GetBackingWebThread();
 }
 
 BaseAudioContext* AudioWorklet::GetBaseAudioContext() const {
......
@@ -44,6 +44,9 @@ class MODULES_EXPORT AudioWorklet final : public Worklet {
   BaseAudioContext* GetBaseAudioContext() const;
 
+  // Returns |nullptr| if there is no active WorkletGlobalScope().
+  AudioWorkletMessagingProxy* GetMessagingProxy();
+
   const Vector<CrossThreadAudioParamInfo> GetParamInfoListForProcessor(
       const String& name);
@@ -62,9 +65,6 @@ class MODULES_EXPORT AudioWorklet final : public Worklet {
   bool NeedsToCreateGlobalScope() final;
   WorkletGlobalScopeProxy* CreateGlobalScope() final;
 
-  // Returns |nullptr| if there is no active WorkletGlobalScope().
-  AudioWorkletMessagingProxy* GetMessagingProxy();
-
   // To catch the first global scope update and notify the context.
   bool worklet_started_ = false;
......
@@ -190,18 +190,10 @@ bool AudioWorkletGlobalScope::Process(
     AudioWorkletProcessor* processor,
     Vector<AudioBus*>* input_buses,
     Vector<AudioBus*>* output_buses,
-    HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map,
-    double current_time) {
+    HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map) {
   CHECK_GE(input_buses->size(), 0u);
   CHECK_GE(output_buses->size(), 0u);
 
-  // Note that all AudioWorkletProcessors share this method for the processing.
-  // AudioWorkletGlobalScope's |current_time_| must be updated only once per
-  // render quantum.
-  if (current_time_ < current_time) {
-    current_time_ = current_time;
-  }
-
   ScriptState* script_state = ScriptController()->GetScriptState();
   ScriptState::Scope scope(script_state);
@@ -376,10 +368,20 @@ ProcessorCreationParams* AudioWorkletGlobalScope::GetProcessorCreationParams() {
   return processor_creation_params_.get();
 }
 
+void AudioWorkletGlobalScope::SetCurrentFrame(size_t current_frame) {
+  current_frame_ = current_frame;
+}
+
 void AudioWorkletGlobalScope::SetSampleRate(float sample_rate) {
   sample_rate_ = sample_rate;
 }
 
+double AudioWorkletGlobalScope::currentTime() const {
+  return sample_rate_ > 0.0
+             ? current_frame_ / static_cast<double>(sample_rate_)
+             : 0.0;
+}
+
 void AudioWorkletGlobalScope::Trace(blink::Visitor* visitor) {
   visitor->Trace(processor_definition_map_);
   visitor->Trace(processor_instances_);
......
@@ -81,8 +81,7 @@ class MODULES_EXPORT AudioWorkletGlobalScope final
       AudioWorkletProcessor*,
       Vector<AudioBus*>* input_buses,
       Vector<AudioBus*>* output_buses,
-      HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map,
-      double current_time);
+      HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map);
 
   AudioWorkletProcessorDefinition* FindDefinition(const String& name);
@@ -95,10 +94,12 @@ class MODULES_EXPORT AudioWorkletGlobalScope final
   // is no on-going processor construction, this MUST return nullptr.
   ProcessorCreationParams* GetProcessorCreationParams();
 
+  void SetCurrentFrame(size_t current_frame);
   void SetSampleRate(float sample_rate);
 
   // IDL
-  double currentTime() const { return current_time_; }
+  unsigned long long currentFrame() const { return current_frame_; }
+  double currentTime() const;
   float sampleRate() const { return sample_rate_; }
 
   void Trace(blink::Visitor*);
@@ -125,7 +126,7 @@ class MODULES_EXPORT AudioWorkletGlobalScope final
   // detail.
   std::unique_ptr<ProcessorCreationParams> processor_creation_params_;
 
-  double current_time_ = 0.0;
+  size_t current_frame_ = 0;
   float sample_rate_ = 0.0;
 };
......
@@ -10,6 +10,7 @@
     OriginTrialEnabled=AudioWorklet
 ] interface AudioWorkletGlobalScope : WorkletGlobalScope {
     [RaisesException, MeasureAs=AudioWorkletGlobalScopeRegisterProcessor] void registerProcessor(DOMString name, Function processorConstructor);
+    readonly attribute unsigned long long currentFrame;
     readonly attribute double currentTime;
     readonly attribute float sampleRate;
 };
}; };
@@ -308,7 +308,7 @@ class AudioWorkletGlobalScopeTest : public PageTestBase {
     // Then invoke the process() method to perform JS buffer manipulation. The
     // output buffer should contain a constant value of 2.
-    processor->Process(&input_buses, &output_buses, &param_data_map, 0.0);
+    processor->Process(&input_buses, &output_buses, &param_data_map);
 
     for (unsigned i = 0; i < output_channel->length(); ++i) {
       EXPECT_EQ(output_channel->Data()[i], 2);
     }
......
@@ -78,11 +78,15 @@ AudioWorkletMessagingProxy::GetParamInfoListForProcessor(
   return processor_info_map_.at(name);
 }
 
-WebThread* AudioWorkletMessagingProxy::GetWorkletBackingThread() {
+WebThread* AudioWorkletMessagingProxy::GetBackingWebThread() {
   auto worklet_thread = static_cast<AudioWorkletThread*>(GetWorkerThread());
   return worklet_thread->GetSharedBackingThread();
 }
 
+WorkerThread* AudioWorkletMessagingProxy::GetBackingWorkerThread() {
+  return GetWorkerThread();
+}
+
 std::unique_ptr<ThreadedWorkletObjectProxy>
 AudioWorkletMessagingProxy::CreateObjectProxy(
     ThreadedWorkletMessagingProxy* messaging_proxy,
......
@@ -56,7 +56,8 @@ class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
   const Vector<CrossThreadAudioParamInfo> GetParamInfoListForProcessor(
       const String& name) const;
 
-  WebThread* GetWorkletBackingThread();
+  WebThread* GetBackingWebThread();
+  WorkerThread* GetBackingWorkerThread();
 
   void Trace(Visitor*);
......
@@ -103,8 +103,7 @@ void AudioWorkletHandler::Process(size_t frames_to_process) {
     // Run the render code and check the state of processor. Finish the
     // processor if needed.
-    if (!processor_->Process(&inputBuses, &outputBuses, &param_value_map_,
-                             Context()->currentTime()) ||
+    if (!processor_->Process(&inputBuses, &outputBuses, &param_value_map_) ||
         !processor_->IsRunnable()) {
       FinishProcessorOnRenderThread();
     }
......
@@ -35,12 +35,11 @@ AudioWorkletProcessor::AudioWorkletProcessor(
 bool AudioWorkletProcessor::Process(
     Vector<AudioBus*>* input_buses,
     Vector<AudioBus*>* output_buses,
-    HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map,
-    double current_time) {
+    HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map) {
   DCHECK(global_scope_->IsContextThread());
   DCHECK(IsRunnable());
-  return global_scope_->Process(this, input_buses, output_buses,
-                                param_value_map, current_time);
+  return global_scope_->Process(this, input_buses, output_buses,
+                                param_value_map);
 }
 
 MessagePort* AudioWorkletProcessor::port() const {
......
@@ -43,8 +43,7 @@ class MODULES_EXPORT AudioWorkletProcessor : public ScriptWrappable {
   bool Process(
       Vector<AudioBus*>* input_buses,
      Vector<AudioBus*>* output_buses,
-      HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map,
-      double current_time);
+      HashMap<String, std::unique_ptr<AudioFloatArray>>* param_value_map);
 
   const String& Name() const { return name_; }
......
@@ -38,6 +38,7 @@
 #include "core/inspector/ConsoleMessage.h"
 #include "core/inspector/ConsoleTypes.h"
 #include "core/origin_trials/origin_trials.h"
+#include "core/workers/WorkerThread.h"
 #include "modules/mediastream/MediaStream.h"
 #include "modules/webaudio/AnalyserNode.h"
 #include "modules/webaudio/AudioBuffer.h"
@@ -47,6 +48,7 @@
 #include "modules/webaudio/AudioNodeInput.h"
 #include "modules/webaudio/AudioNodeOutput.h"
 #include "modules/webaudio/AudioWorklet.h"
+#include "modules/webaudio/AudioWorkletGlobalScope.h"
 #include "modules/webaudio/AudioWorkletMessagingProxy.h"
 #include "modules/webaudio/BiquadFilterNode.h"
 #include "modules/webaudio/ChannelMergerNode.h"
@@ -1000,8 +1002,12 @@ AudioWorklet* BaseAudioContext::audioWorklet() const {
 }
 
 void BaseAudioContext::NotifyWorkletIsReady() {
+  DCHECK(IsMainThread());
   DCHECK(audioWorklet()->IsReady());
 
+  worklet_backing_worker_thread_ =
+      audioWorklet()->GetMessagingProxy()->GetBackingWorkerThread();
+
   // If the context is running or suspended, restart the destination to switch
   // the render thread with the worklet thread. Note that restarting can happen
   // right after the context construction.
@@ -1010,4 +1016,18 @@ void BaseAudioContext::NotifyWorkletIsReady() {
   }
 }
 
+void BaseAudioContext::UpdateWorkletGlobalScopeOnRenderingThread() {
+  DCHECK(!IsMainThread());
+
+  // Only if the worklet is properly initialized and ready.
+  if (worklet_backing_worker_thread_) {
+    AudioWorkletGlobalScope* global_scope =
+        ToAudioWorkletGlobalScope(
+            worklet_backing_worker_thread_->GlobalScope());
+    DCHECK(global_scope);
+    global_scope->SetCurrentFrame(CurrentSampleFrame());
+  }
+}
+
 }  // namespace blink
@@ -83,6 +83,7 @@ class ScriptState;
 class SecurityOrigin;
 class StereoPannerNode;
 class WaveShaperNode;
+class WorkerThread;
 
 // BaseAudioContext is the cornerstone of the web audio API and all AudioNodes
 // are created from it. For thread safety between the audio thread and the main
@@ -340,6 +341,11 @@ class MODULES_EXPORT BaseAudioContext
   // the first script evaluation.
   void NotifyWorkletIsReady();
 
+  // Update the information in AudioWorkletGlobalScope if necessary. Must be
+  // called from the rendering thread. Does nothing when the global scope
+  // does not exist.
+  void UpdateWorkletGlobalScopeOnRenderingThread();
+
  protected:
   enum ContextType { kRealtimeContext, kOfflineContext };
@@ -511,6 +517,10 @@ class MODULES_EXPORT BaseAudioContext
   AudioIOPosition output_position_;
 
   Member<AudioWorklet> audio_worklet_;
+
+  // Only for the access to AudioWorkletGlobalScope from the render thread.
+  // Use the WebThread in the destination nodes for the task scheduling.
+  WorkerThread* worklet_backing_worker_thread_ = nullptr;
 };
......
@@ -354,6 +354,8 @@ bool OfflineAudioDestinationHandler::RenderIfNotSuspended(
   size_t new_sample_frame = current_sample_frame_ + number_of_frames;
   ReleaseStore(&current_sample_frame_, new_sample_frame);
 
+  Context()->UpdateWorkletGlobalScopeOnRenderingThread();
+
   return false;
 }
......