Commit 249e75ed authored by Hongchan Choi, committed by Commit Bot

Move audioWorklet under BaseAudioContext

Spec PR (approved): https://github.com/WebAudio/web-audio-api/pull/1445

Based on the spec change, this CL moves audioWorklet from `window`
to `BaseAudioContext`. This alters the relationship between
BaseAudioContext, AudioWorkletMessagingProxy and AudioWorkletGlobalScope:

- AWMP/AWGS can no longer exist without a BaseAudioContext, which reduces
  code complexity by removing checks.
- With this clear ownership, BaseAudioContext only needs to talk to
  AudioWorklet, so AudioWorkletMessagingProxy can be hidden.
- When script evaluation completes, AWGS notifies the associated
  BaseAudioContext via AWObjectProxy and AWMessagingProxy (a usage
  sketch follows below).
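
For reference, the web-facing change is roughly the following (a minimal
sketch only; the module and processor names are illustrative, not from
this CL):

  // Before this CL, audioWorklet hung off window:
  //   window.audioWorklet.addModule('my-processor.js');
  //
  // After this CL, each BaseAudioContext owns its own AudioWorklet:
  const context = new AudioContext();
  context.audioWorklet.addModule('my-processor.js').then(() => {
    const node = new AudioWorkletNode(context, 'my-processor');
    node.connect(context.destination);
  });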

Bug: 786542
Change-Id: I7b55fcacf304a4a25b527bcbcd285bef80881b8d
Reviewed-on: https://chromium-review.googlesource.com/779862
Commit-Queue: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Hiroki Nakagawa <nhiroki@chromium.org>
Cr-Commit-Position: refs/heads/master@{#521078}
parent fe1dd778
-/**
- * Assert AudioWorklet is enabled.
- *
- * The content_shell driven by run-webkit-tests.py is supposed to enable all the
- * experimental web platform features.
- *
- * We also want to run the test on the browser. So we check both cases for
- * the content shell and the browser.
- */
+// Check if AudioWorklet is available before running a test.
 function assertAudioWorklet() {
+  let offlineContext = new OfflineAudioContext(1, 1, 44100);
+  // We want to be able to run tests both on the browser and the content shell.
+  // So check if AudioWorklet runtime flag is enabled, or check the context
+  // has AudioWorklet object.
   if ((Boolean(window.internals) &&
        Boolean(window.internals.runtimeFlags.audioWorkletEnabled)) ||
-      (Boolean(window.Worklet) && Boolean(window.audioWorklet))) {
+      offlineContext.audioWorklet instanceof AudioWorklet) {
     return;
   }
...
@@ -21,8 +21,8 @@
   // Sets up AudioWorklet and OfflineAudioContext.
   audit.define('Initializing AudioWorklet and Context', (task, should) => {
-    audioWorklet.addModule('gain-processor.js').then(() => {
-      context = new OfflineAudioContext(1, renderLength, sampleRate);
+    context = new OfflineAudioContext(1, renderLength, sampleRate);
+    context.audioWorklet.addModule('gain-processor.js').then(() => {
       task.done();
     });
   });
...
@@ -36,17 +36,19 @@
   audit.define(
       {label: 'construction-after-module-loading'},
       (task, should) => {
-        audioWorklet.addModule('dummy-processor.js').then(() => {
-          let dummyWorkletNode =
-              new AudioWorkletNode(realtimeContext, 'dummy');
-          should(dummyWorkletNode instanceof AudioWorkletNode,
-                 '"dummyWorkletNode" is an instance of AudioWorkletNode')
-              .beTrue();
-          should(() => new AudioWorkletNode(realtimeContext, 'foobar'),
-                 'Unregistered name "foobar" must throw an exception.')
-              .throw();
-          task.done();
-        });
+        realtimeContext.audioWorklet.addModule('dummy-processor.js')
+            .then(() => {
+              let dummyWorkletNode =
+                  new AudioWorkletNode(realtimeContext, 'dummy');
+              should(dummyWorkletNode instanceof AudioWorkletNode,
+                     '"dummyWorkletNode" is an instance of '
+                     + 'AudioWorkletNode')
+                  .beTrue();
+              should(() => new AudioWorkletNode(realtimeContext, 'foobar'),
+                     'Unregistered name "foobar" must throw an exception.')
+                  .throw();
+              task.done();
+            });
       });

   audit.run();
...
@@ -21,8 +21,8 @@
   // Load script file and create a OfflineAudiocontext.
   audit.define('setup', (task, should) => {
-    audioWorklet.addModule('dummy-processor.js').then(() => {
-      context = new OfflineAudioContext(1, 1, sampleRate);
+    context = new OfflineAudioContext(1, 1, sampleRate);
+    context.audioWorklet.addModule('dummy-processor.js').then(() => {
       task.done();
     });
   });
...
@@ -23,59 +23,63 @@
   audit.define('pending-running-stopped',
       (task, should) => {
         let context = new OfflineAudioContext(1, renderLength, sampleRate);
-        let timedWorkletNode = new AudioWorkletNode(context, 'timed');
+        context.audioWorklet.addModule('state-processor.js').then(() => {
+          let timedWorkletNode = new AudioWorkletNode(context, 'timed');

          // The construction of associated processor has not been
          // completed. In this state, no audio processing can happen and
          // all messages to the processor will be queued.
          should(timedWorkletNode.processorState,
                 'Checking the processor state upon the constructor call')
              .beEqualTo('pending');
          timedWorkletNode.connect(context.destination);

          // Checks the handler of |onprocessorstatechange|. Because the
          // processor script is correct, the |running| state change MUST
          // be fired.
          let isFirstPhase = true;
          timedWorkletNode.onprocessorstatechange = () => {
            // The first phase should be "running".
            if (isFirstPhase) {
              should(timedWorkletNode.processorState,
                     'Checking the processor state upon ' +
                     'processorstatechange event')
                  .beEqualTo('running');
              isFirstPhase = false;
            } else {
              // The second phase in this test must be "stopped".
              should(timedWorkletNode.processorState,
                     'Checking the processor state after ' +
                     'processor stopped processing')
                  .beEqualTo('stopped');
              task.done();
            }
          };

          context.startRendering();
+        });
       });

   // Test the error state caused by the failure of processor constructor.
   audit.define('constructor-error',
       (task, should) => {
         let context = new OfflineAudioContext(1, renderLength, sampleRate);
-        let constructorErrorWorkletNode =
-            new AudioWorkletNode(context, 'constructor-error');
-        should(constructorErrorWorkletNode.processorState,
-               'constructorErrorWorkletNode.processorState after ' +
-               'its construction')
-            .beEqualTo('pending');
-        constructorErrorWorkletNode.onprocessorstatechange = () => {
-          should(constructorErrorWorkletNode.processorState,
-                 'workletNode.processorState upon processorstatechange ' +
-                 'event after the failure from processor.constructor()')
-              .beEqualTo('error');
-          task.done();
-        };
+        context.audioWorklet.addModule('state-processor.js').then(() => {
+          let constructorErrorWorkletNode =
+              new AudioWorkletNode(context, 'constructor-error');
+          should(constructorErrorWorkletNode.processorState,
+                 'constructorErrorWorkletNode.processorState after ' +
+                 'its construction')
+              .beEqualTo('pending');
+          constructorErrorWorkletNode.onprocessorstatechange = () => {
+            should(constructorErrorWorkletNode.processorState,
+                   'workletNode.processorState upon processorstatechange ' +
+                   'event after the failure from processor.constructor()')
+                .beEqualTo('error');
+            task.done();
+          };
+        });
       });
   // Test the error state caused by the failure of processor's process()
@@ -83,35 +87,35 @@
   audit.define('process-error',
       (task, should) => {
         let context = new OfflineAudioContext(1, renderLength, sampleRate);
-        let processErrorWorkletNode =
-            new AudioWorkletNode(context, 'process-error');
-        should(processErrorWorkletNode.processorState,
-               'processErrorWorkletNode.processorState after ' +
-               'its construction')
-            .beEqualTo('pending');
+        context.audioWorklet.addModule('state-processor.js').then(() => {
+          let processErrorWorkletNode =
+              new AudioWorkletNode(context, 'process-error');
+          should(processErrorWorkletNode.processorState,
+                 'processErrorWorkletNode.processorState after ' +
+                 'its construction')
+              .beEqualTo('pending');
           processErrorWorkletNode.connect(context.destination);
           let isFirstPhase = true;
           processErrorWorkletNode.onprocessorstatechange = () => {
             if (isFirstPhase) {
               // Ignore the first state change event, which is "running";
               isFirstPhase = false;
             } else {
               should(processErrorWorkletNode.processorState,
                      'workletNode.processorState upon processorstatechange ' +
                      'event after the failure from processor.process()')
                  .beEqualTo('error');
               task.done();
             }
           };
           context.startRendering();
+        });
       });

-  audioWorklet.addModule('state-processor.js').then(() => {
-    audit.run();
-  });
+  audit.run();
 </script>
 </body>
 </html>
@@ -2,7 +2,7 @@
 <html>
   <head>
     <title>
-      Checking window.audioWorklet
+      Checking BaseAudioContext.audioWorklet
     </title>
     <script src="../../resources/testharness.js"></script>
     <script src="../../resources/testharnessreport.js"></script>
@@ -16,30 +16,18 @@
       let audit = Audit.createTaskRunner();

+      let realtimeContext = new AudioContext();
+      let offlineContext = new OfflineAudioContext(1, 1, 44100);
+
       // Test if AudioWorklet exists.
       audit.define('Test if AudioWorklet exists', (task, should) => {
-        should(window.audioWorklet instanceof Worklet,
-               'window.audioWorklet is an instance of Worklet')
+        should(realtimeContext.audioWorklet instanceof AudioWorklet &&
+               offlineContext.audioWorklet instanceof AudioWorklet,
+               'BaseAudioContext.audioWorklet is an instance of AudioWorklet')
             .beTrue();
         task.done();
       });

-      // Test the construction of BaseAudioContext before |worklet.addModule()|.
-      audit.define(
-          'Test invocation of addModule() after BaseAudioContext construction',
-          (task, should) => {
-            should(
-                () => {
-                  let context = new AudioContext();
-                  audioWorklet.addModule('bypass-processor.js');
-                },
-                'Calling audioWorklet.addModule() before construction of ' +
-                'BaseAudioContext')
-                .notThrow();
-            task.done();
-          });
-
       audit.run();
 </script>
 </body>
...
@@ -36,7 +36,7 @@
   context.channeCountMode = 'explicit';
   context.channelInterpretation = 'discrete';

-  audioWorklet.addModule('gain-processor.js').then(() => {
+  context.audioWorklet.addModule('gain-processor.js').then(() => {
     let testBuffer = createConstantBuffer(context, 1, testChannelValues);
     let sourceNode = new AudioBufferSourceNode(context);
     let gainWorkletNode = new AudioWorkletNode(context, 'gain');
...
@@ -60,7 +60,7 @@
     porterWorkletNode.port.postMessage('hello');
   });

-  audioWorklet.addModule('port-processor.js').then(() => {
+  context.audioWorklet.addModule('port-processor.js').then(() => {
     audit.run();
   });
 </script>
...
@@ -50,9 +50,12 @@
     task.done();
   });

-  audioWorklet.addModule('dummy-processor.js').then(() => {
-    audit.run();
-  });
+  Promise.all([
+    realtimeContext.audioWorklet.addModule('dummy-processor.js'),
+    offlineContext.audioWorklet.addModule('dummy-processor.js')
+  ]).then(() => {
+    audit.run();
+  });
 </script>
 </body>
 </html>
@@ -7,14 +7,18 @@
 <script src="/resources/get-host-info.js"></script>
 <script>

-// This test should not be upstreamed to WPT because Worklets are now restrected
+// This test should not be upstreamed to WPT because Worklets are now restricted
 // to secure contexts by Chrome's policy.

+const realTimeContext = new AudioContext();
+const offlineContext = new OfflineAudioContext(1, 1, 44100);
+
 if (window.location.origin != get_host_info().UNAUTHENTICATED_ORIGIN) {
   test(t => {
     assert_not_equals(undefined, CSS.paintWorklet);
     assert_not_equals(undefined, window.animationWorklet);
-    assert_not_equals(undefined, window.audioWorklet);
+    assert_not_equals(undefined, realTimeContext.audioWorklet);
+    assert_not_equals(undefined, offlineContext.audioWorklet);
   }, 'Worklets should be available on a secure context.');

   window.location = get_host_info().UNAUTHENTICATED_ORIGIN +
       window.location.pathname;
@@ -22,7 +26,8 @@ if (window.location.origin != get_host_info().UNAUTHENTICATED_ORIGIN) {
   test(t => {
     assert_equals(undefined, CSS.paintWorklet);
     assert_equals(undefined, window.animationWorklet);
-    assert_equals(undefined, window.audioWorklet);
+    assert_equals(undefined, realTimeContext.audioWorklet);
+    assert_equals(undefined, offlineContext.audioWorklet);
   }, 'Worklets should not be available on an insecure context.');
 }
...
@@ -20,36 +20,37 @@
         description: 'Generate warnings if outside nominal range'
       },
       (task, should) => {
-        window.audioWorklet.addModule(NoiseGenWorkletUrl).then(() => {
-          // Any reasonable sample rate will work
-          let sampleRate = 16000;
-          let renderTime = 1;
-          let renderLength = renderTime * sampleRate;
-          let context =
-              new OfflineAudioContext(1, renderLength, sampleRate);
+        // Any reasonable sample rate will work
+        let sampleRate = 16000;
+        let renderTime = 1;
+        let renderLength = renderTime * sampleRate;
+        let context =
+            new OfflineAudioContext(1, renderLength, sampleRate);
+
+        context.audioWorklet.addModule(NoiseGenWorkletUrl).then(() => {
           let noiseGenerator =
               new AudioWorkletNode(context, 'noise-generator');
           noiseGenerator.connect(context.destination);

           let param = noiseGenerator.parameters.get('amplitude');

           // Set the value inside the nominal range; no warning should be
           // generated.
           param.value = .1;

           // Set the value outside the nominal range to generate a
           // warning.
           param.value = 99;

           // Set up automation outside the nominal range to generate a
           // warning.
           param.setValueAtTime(-1, renderTime / 4);
           param.linearRampToValueAtTime(5, renderTime);

           // Render; we don't care what the generated result is.
           context.startRendering()
               .then(() => {
                 should(true, 'Rendering succeeded').beTrue();
               })
               .then(() => task.done());
-        })});
+        });
+      });

   audit.run();
 </script>
...
@@ -7,6 +7,7 @@
 let BaseAudioContextOwnProperties = [
+  'audioWorklet',
   'constructor',
   'createAnalyser',
   'createBiquadFilter',
...
@@ -402,6 +402,9 @@ interface AudioTrackList : EventTarget
     setter onaddtrack
     setter onchange
     setter onremovetrack
+interface AudioWorklet : Worklet
+    attribute @@toStringTag
+    method constructor
 interface AudioWorkletNode : AudioNode
     attribute @@toStringTag
     method constructor
@@ -451,6 +454,7 @@ interface BarcodeDetector
     method detect
 interface BaseAudioContext : EventTarget
     attribute @@toStringTag
+    getter audioWorklet
     getter currentTime
     getter destination
     getter listener
@@ -9154,7 +9158,6 @@ interface webkitURL
     attribute window
     getter animationWorklet
     getter applicationCache
-    getter audioWorklet
     getter caches
     getter clientInformation
     getter cookieStore
...
@@ -329,6 +329,7 @@ modules_idl_files =
       "webaudio/AudioParamMap.idl",
       "webaudio/AudioProcessingEvent.idl",
       "webaudio/AudioScheduledSourceNode.idl",
+      "webaudio/AudioWorklet.idl",
       "webaudio/AudioWorkletGlobalScope.idl",
       "webaudio/AudioWorkletProcessor.idl",
       "webaudio/AudioWorkletNode.idl",
@@ -722,7 +723,6 @@ modules_dependency_idl_files =
       "vibration/NavigatorVibration.idl",
       "vr/NavigatorVR.idl",
       "wake_lock/ScreenWakeLock.idl",
-      "webaudio/WindowAudioWorklet.idl",
       "webdatabase/WindowWebDatabase.idl",
       "webgl/WebGL2RenderingContextBase.idl",
       "webgl/WebGLRenderingContextBase.idl",
...
@@ -6,77 +6,90 @@
 #include "bindings/core/v8/V8BindingForCore.h"
 #include "core/dom/Document.h"
+#include "core/frame/LocalDOMWindow.h"
 #include "core/frame/LocalFrame.h"
 #include "core/workers/WorkerClients.h"
 #include "modules/webaudio/AudioWorkletMessagingProxy.h"
 #include "modules/webaudio/BaseAudioContext.h"
+#include "modules/webaudio/CrossThreadAudioWorkletProcessorInfo.h"

 namespace blink {

-AudioWorklet* AudioWorklet::Create(LocalFrame* frame) {
-  return new AudioWorklet(frame);
+AudioWorklet* AudioWorklet::Create(BaseAudioContext* context) {
+  return RuntimeEnabledFeatures::AudioWorkletEnabled()
+      ? new AudioWorklet(context)
+      : nullptr;
 }

-AudioWorklet::AudioWorklet(LocalFrame* frame) : Worklet(frame) {}
+AudioWorklet::AudioWorklet(BaseAudioContext* context)
+    : Worklet(context->GetExecutionContext()->ExecutingWindow()->GetFrame()),
+      context_(context) {}

-AudioWorklet::~AudioWorklet() {
-  contexts_.clear();
-}
-
-void AudioWorklet::RegisterContext(BaseAudioContext* context) {
-  DCHECK(!contexts_.Contains(context));
-  contexts_.insert(context);
-
-  // Check if AudioWorklet loads the script and has an active
-  // AudioWorkletGlobalScope before getting the messaging proxy.
-  if (IsWorkletMessagingProxyCreated())
-    context->SetWorkletMessagingProxy(FindAvailableMessagingProxy());
-}
-
-void AudioWorklet::UnregisterContext(BaseAudioContext* context) {
-  // This may be called multiple times from BaseAudioContext.
-  if (!contexts_.Contains(context))
-    return;
-
-  contexts_.erase(context);
-}
-
-AudioWorkletMessagingProxy* AudioWorklet::FindAvailableMessagingProxy() {
-  return static_cast<AudioWorkletMessagingProxy*>(FindAvailableGlobalScope());
-}
-
-bool AudioWorklet::IsWorkletMessagingProxyCreated() const {
-  return GetNumberOfGlobalScopes() > 0;
+void AudioWorklet::CreateProcessor(AudioWorkletHandler* handler,
+                                   MessagePortChannel message_port_channel) {
+  DCHECK(IsMainThread());
+  DCHECK(GetMessagingProxy());
+  GetMessagingProxy()->CreateProcessor(handler,
+                                       std::move(message_port_channel));
+}
+
+void AudioWorklet::NotifyGlobalScopeIsUpdated() {
+  DCHECK(IsMainThread());
+
+  if (!worklet_started_) {
+    context_->NotifyWorkletIsReady();
+    worklet_started_ = true;
+  }
+}
+
+WebThread* AudioWorklet::GetBackingThread() {
+  DCHECK(IsMainThread());
+  DCHECK(GetMessagingProxy());
+  return GetMessagingProxy()->GetWorkletBackingThread();
+}
+
+const Vector<CrossThreadAudioParamInfo>
+    AudioWorklet::GetParamInfoListForProcessor(const String& name) {
+  DCHECK(IsMainThread());
+  DCHECK(GetMessagingProxy());
+  return GetMessagingProxy()->GetParamInfoListForProcessor(name);
+}
+
+bool AudioWorklet::IsProcessorRegistered(const String& name) {
+  DCHECK(IsMainThread());
+  DCHECK(GetMessagingProxy());
+  return GetMessagingProxy()->IsProcessorRegistered(name);
+}
+
+bool AudioWorklet::IsReady() {
+  DCHECK(IsMainThread());
+  return GetMessagingProxy() && GetBackingThread();
 }

 bool AudioWorklet::NeedsToCreateGlobalScope() {
-  // TODO(hongchan): support multiple WorkletGlobalScopes, one scope per a
-  // BaseAudioContext. In order to do it, FindAvailableGlobalScope() needs to
-  // be inherited and rewritten.
   return GetNumberOfGlobalScopes() == 0;
 }

 WorkletGlobalScopeProxy* AudioWorklet::CreateGlobalScope() {
   DCHECK(NeedsToCreateGlobalScope());
-  WorkerClients* worker_clients = WorkerClients::Create();
   AudioWorkletMessagingProxy* proxy =
-      new AudioWorkletMessagingProxy(GetExecutionContext(), worker_clients);
+      new AudioWorkletMessagingProxy(GetExecutionContext(),
+                                     WorkerClients::Create(),
+                                     this);
   proxy->Initialize();
-
-  for (BaseAudioContext* context : contexts_) {
-    // TODO(hongchan): Currently all BaseAudioContexts shares a single
-    // AudioWorkletMessagingProxy. Fix this to support one messaging proxy for
-    // each BaseAudioContext.
-    if (!context->HasWorkletMessagingProxy())
-      context->SetWorkletMessagingProxy(proxy);
-  }
-
   return proxy;
 }

+AudioWorkletMessagingProxy* AudioWorklet::GetMessagingProxy() {
+  return NeedsToCreateGlobalScope()
+      ? nullptr
+      : static_cast<AudioWorkletMessagingProxy*>(FindAvailableGlobalScope());
+}
+
 void AudioWorklet::Trace(blink::Visitor* visitor) {
-  visitor->Trace(contexts_);
+  visitor->Trace(context_);
   Worklet::Trace(visitor);
 }
...
@@ -11,36 +11,59 @@
 namespace blink {

+class AudioWorkletHandler;
 class AudioWorkletMessagingProxy;
 class BaseAudioContext;
-class LocalFrame;
+class CrossThreadAudioParamInfo;
+class MessagePortChannel;

 class MODULES_EXPORT AudioWorklet final : public Worklet {
+  DEFINE_WRAPPERTYPEINFO();
+  USING_GARBAGE_COLLECTED_MIXIN(AudioWorklet);
   WTF_MAKE_NONCOPYABLE(AudioWorklet);

  public:
-  static AudioWorklet* Create(LocalFrame*);
-  ~AudioWorklet() override;
+  // When the AudioWorklet runtime flag is not enabled, this constructor returns
+  // |nullptr|.
+  static AudioWorklet* Create(BaseAudioContext*);

-  void RegisterContext(BaseAudioContext*);
-  void UnregisterContext(BaseAudioContext*);
+  ~AudioWorklet() = default;

-  AudioWorkletMessagingProxy* FindAvailableMessagingProxy();
+  void CreateProcessor(AudioWorkletHandler*, MessagePortChannel);

-  virtual void Trace(blink::Visitor*);
+  // Invoked by AudioWorkletMessagingProxy. Notifies |context_| when
+  // AudioWorkletGlobalScope finishes the first script evaluation and is ready
+  // for the worklet operation. Can be used for other post-evaluation tasks
+  // in AudioWorklet or BaseAudioContext.
+  void NotifyGlobalScopeIsUpdated();
+
+  WebThread* GetBackingThread();
+
+  const Vector<CrossThreadAudioParamInfo> GetParamInfoListForProcessor(
+      const String& name);
+
+  bool IsProcessorRegistered(const String& name);
+
+  // Returns |true| when a AudioWorkletMessagingProxy and a WorkletBackingThread
+  // are ready.
+  bool IsReady();
+
+  void Trace(blink::Visitor*) override;

  private:
-  explicit AudioWorklet(LocalFrame*);
+  explicit AudioWorklet(BaseAudioContext*);

-  // Implements Worklet.
+  // Implements Worklet
   bool NeedsToCreateGlobalScope() final;
   WorkletGlobalScopeProxy* CreateGlobalScope() final;

-  bool IsWorkletMessagingProxyCreated() const;
+  // Returns |nullptr| if there is no active WorkletGlobalScope().
+  AudioWorkletMessagingProxy* GetMessagingProxy();
+
+  // To catch the first global scope update and notify the context.
+  bool worklet_started_ = false;

-  // AudioWorklet keeps the reference of all active BaseAudioContexts, so it
-  // can notify the contexts when a script is loaded in AudioWorkletGlobalScope.
-  HeapHashSet<Member<BaseAudioContext>> contexts_;
+  Member<BaseAudioContext> context_;
 };

 } // namespace blink
...
@@ -5,9 +5,7 @@
 // https://webaudio.github.io/web-audio-api/#AudioWorklet

 [
-    ImplementedAs=WindowAudioWorklet,
     RuntimeEnabled=AudioWorklet,
     SecureContext
-] partial interface Window {
-    readonly attribute Worklet audioWorklet;
+] interface AudioWorklet : Worklet {
 };
@@ -5,6 +5,7 @@
 #include "modules/webaudio/AudioWorkletMessagingProxy.h"

 #include "core/dom/MessagePort.h"
+#include "modules/webaudio/AudioWorklet.h"
 #include "modules/webaudio/AudioWorkletGlobalScope.h"
 #include "modules/webaudio/AudioWorkletNode.h"
 #include "modules/webaudio/AudioWorkletObjectProxy.h"
@@ -17,10 +18,10 @@ namespace blink {
 AudioWorkletMessagingProxy::AudioWorkletMessagingProxy(
     ExecutionContext* execution_context,
-    WorkerClients* worker_clients)
-    : ThreadedWorkletMessagingProxy(execution_context, worker_clients) {}
-
-AudioWorkletMessagingProxy::~AudioWorkletMessagingProxy() {}
+    WorkerClients* worker_clients,
+    AudioWorklet* worklet)
+    : ThreadedWorkletMessagingProxy(execution_context, worker_clients),
+      worklet_(worklet) {}

 void AudioWorkletMessagingProxy::CreateProcessor(
     AudioWorkletHandler* handler,
@@ -61,6 +62,10 @@ void AudioWorkletMessagingProxy::SynchronizeWorkletProcessorInfoList(
     processor_info_map_.insert(processor_info.Name(),
                                processor_info.ParamInfoList());
   }
+
+  // Notify AudioWorklet object that the global scope has been updated after the
+  // script evaluation.
+  worklet_->NotifyGlobalScopeIsUpdated();
 }

 bool AudioWorkletMessagingProxy::IsProcessorRegistered(
@@ -94,4 +99,10 @@ std::unique_ptr<WorkerThread> AudioWorkletMessagingProxy::CreateWorkerThread() {
       WorkletObjectProxy());
 }

+void AudioWorkletMessagingProxy::Trace(Visitor* visitor) {
+  visitor->Trace(worklet_);
+  ThreadedWorkletMessagingProxy::Trace(visitor);
+}
+
 } // namespace blink
@@ -10,6 +10,7 @@
 namespace blink {

+class AudioWorklet;
 class AudioWorkletHandler;
 class CrossThreadAudioParamInfo;
 class CrossThreadAudioWorkletProcessorInfo;
@@ -23,7 +24,7 @@ class WorkerThread;
 // scope via AudioWorkletObjectProxy.
 class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
  public:
-  AudioWorkletMessagingProxy(ExecutionContext*, WorkerClients*);
+  AudioWorkletMessagingProxy(ExecutionContext*, WorkerClients*, AudioWorklet*);

   // Since the creation of AudioWorkletProcessor needs to be done in the
   // different thread, this method is a wrapper for cross-thread task posting.
@@ -54,9 +55,9 @@ class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
   WebThread* GetWorkletBackingThread();

- private:
-  ~AudioWorkletMessagingProxy() override;
+  void Trace(Visitor*);

+ private:
   // Implements ThreadedWorkletMessagingProxy.
   std::unique_ptr<ThreadedWorkletObjectProxy> CreateObjectProxy(
       ThreadedWorkletMessagingProxy*,
@@ -66,6 +67,8 @@ class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
   // Each entry consists of processor name and associated AudioParam list.
   HashMap<String, Vector<CrossThreadAudioParamInfo>> processor_info_map_;
+
+  Member<AudioWorklet> worklet_;
 };

 } // namespace blink
...
@@ -11,8 +11,7 @@
 #include "modules/webaudio/AudioNodeInput.h"
 #include "modules/webaudio/AudioNodeOutput.h"
 #include "modules/webaudio/AudioParamDescriptor.h"
-#include "modules/webaudio/AudioWorkletGlobalScope.h"
-#include "modules/webaudio/AudioWorkletMessagingProxy.h"
+#include "modules/webaudio/AudioWorklet.h"
 #include "modules/webaudio/AudioWorkletProcessor.h"
 #include "modules/webaudio/AudioWorkletProcessorDefinition.h"
 #include "modules/webaudio/CrossThreadAudioWorkletProcessorInfo.h"
@@ -277,7 +276,7 @@ AudioWorkletNode* AudioWorkletNode::Create(
     }
   }

-  if (!context->HasWorkletMessagingProxy()) {
+  if (!context->audioWorklet()->IsReady()) {
     exception_state.ThrowDOMException(
         kInvalidStateError,
         "AudioWorkletNode cannot be created: AudioWorklet does not have a "
@@ -286,9 +285,7 @@ AudioWorkletNode* AudioWorkletNode::Create(
     return nullptr;
   }

-  AudioWorkletMessagingProxy* proxy = context->WorkletMessagingProxy();
-
-  if (!proxy->IsProcessorRegistered(name)) {
+  if (!context->audioWorklet()->IsProcessorRegistered(name)) {
     exception_state.ThrowDOMException(
         kInvalidStateError,
         "AudioWorkletNode cannot be created: The node name '" + name +
@@ -302,8 +299,8 @@ AudioWorkletNode* AudioWorkletNode::Create(
   AudioWorkletNode* node =
       new AudioWorkletNode(*context, name, options,
-                           proxy->GetParamInfoListForProcessor(name),
+                           context->audioWorklet()->GetParamInfoListForProcessor(name),
                            channel->port1());

   if (!node) {
     exception_state.ThrowDOMException(
@@ -319,8 +316,8 @@ AudioWorkletNode* AudioWorkletNode::Create(
   // This is non-blocking async call. |node| still can be returned to user
   // before the scheduled async task is completed.
-  proxy->CreateProcessor(&node->GetWorkletHandler(),
-                         std::move(processor_port_channel));
+  context->audioWorklet()->CreateProcessor(&node->GetWorkletHandler(),
+                                           std::move(processor_port_channel));

   return node;
 }
...
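
The checks above surface to script as InvalidStateError. A rough sketch of
the behavior they imply in this implementation (the processor name is
illustrative, not from this CL's tests):

  const context = new AudioContext();
  try {
    // addModule() has not been called or has not resolved yet, so the
    // worklet has no global scope and no registered processors.
    new AudioWorkletNode(context, 'unregistered-name');
  } catch (e) {
    console.log(e.name);  // 'InvalidStateError' in this implementation
  }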
@@ -125,19 +125,9 @@ blink_modules_sources("webaudio") {
     "WaveShaperNode.h",
     "WaveShaperProcessor.cpp",
     "WaveShaperProcessor.h",
-    "WindowAudioWorklet.cpp",
-    "WindowAudioWorklet.h",
   ]

   if (is_win) {
-    jumbo_excluded_sources = [
-      # Uses Supplement<LocalDOMWindow> with MODULES_EXPORT while
-      # other files use Supplement<LocalDOMWindow> with
-      # CORE_EXPORT. Mixing those in the same compilation unit
-      # triggers link errors in Windows. https://crbug.com/739340
-      "WindowAudioWorklet.cpp",
-    ]
-
     # Result of 32-bit shift implicitly converted to 64 bits.
     cflags = [ "/wd4334" ]
   }
...
@@ -71,7 +71,6 @@
 #include "modules/webaudio/ScriptProcessorNode.h"
 #include "modules/webaudio/StereoPannerNode.h"
 #include "modules/webaudio/WaveShaperNode.h"
-#include "modules/webaudio/WindowAudioWorklet.h"
 #include "platform/CrossThreadFunctional.h"
 #include "platform/Histogram.h"
 #include "platform/audio/IIRFilter.h"
@@ -137,9 +136,6 @@ BaseAudioContext::BaseAudioContext(Document* document,
 BaseAudioContext::~BaseAudioContext() {
   GetDeferredTaskHandler().ContextWillBeDestroyed();

-  // AudioNodes keep a reference to their context, so there should be no way to
-  // be in the destructor if there are still AudioNodes around.
-  DCHECK(!IsDestinationInitialized());
   DCHECK(!active_source_nodes_.size());
   DCHECK(!is_resolving_resume_promises_);
   DCHECK(!resume_resolvers_.size());
@@ -152,21 +148,16 @@ void BaseAudioContext::Initialize() {
   FFTFrame::Initialize();

+  if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
+    audio_worklet_ = AudioWorklet::Create(this);
+  }
+
   if (destination_node_) {
     destination_node_->Handler().Initialize();
     // The AudioParams in the listener need access to the destination node, so
     // only create the listener if the destination node exists.
     listener_ = AudioListener::Create(*this);
   }
-
-  // Check if a document or a frame supports AudioWorklet. If not, AudioWorklet
-  // cannot be accessed.
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
-    AudioWorklet* audioWorklet = WindowAudioWorklet::audioWorklet(
-        *GetExecutionContext()->ExecutingWindow());
-    if (audioWorklet)
-      audioWorklet->RegisterContext(this);
-  }
 }

 void BaseAudioContext::Clear() {
@@ -180,17 +171,6 @@ void BaseAudioContext::Clear() {
 void BaseAudioContext::Uninitialize() {
   DCHECK(IsMainThread());

-  // AudioWorklet may be destroyed before the context goes away. So we have to
-  // check the pointer. See: crbug.com/503845
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
-    AudioWorklet* audioWorklet = WindowAudioWorklet::audioWorklet(
-        *GetExecutionContext()->ExecutingWindow());
-    if (audioWorklet) {
-      audioWorklet->UnregisterContext(this);
-      worklet_messaging_proxy_.Clear();
-    }
-  }
-
   if (!IsDestinationInitialized())
     return;
@@ -1012,7 +992,7 @@ void BaseAudioContext::Trace(blink::Visitor* visitor) {
   visitor->Trace(periodic_wave_square_);
   visitor->Trace(periodic_wave_sawtooth_);
   visitor->Trace(periodic_wave_triangle_);
-  visitor->Trace(worklet_messaging_proxy_);
+  visitor->Trace(audio_worklet_);
   EventTargetWithInlineData::Trace(visitor);
   PausableObject::Trace(visitor);
 }
@@ -1038,15 +1018,12 @@ SecurityOrigin* BaseAudioContext::GetSecurityOrigin() const {
   return nullptr;
 }

-bool BaseAudioContext::HasWorkletMessagingProxy() const {
-  return has_worklet_messaging_proxy_;
+AudioWorklet* BaseAudioContext::audioWorklet() const {
+  return audio_worklet_.Get();
 }

-void BaseAudioContext::SetWorkletMessagingProxy(
-    AudioWorkletMessagingProxy* proxy) {
-  DCHECK(!worklet_messaging_proxy_);
-  worklet_messaging_proxy_ = proxy;
-  has_worklet_messaging_proxy_ = true;
+void BaseAudioContext::NotifyWorkletIsReady() {
+  DCHECK(audioWorklet()->IsReady());

   // If the context is running or suspended, restart the destination to switch
   // the render thread with the worklet thread. Note that restarting can happen
@@ -1056,9 +1033,4 @@ void BaseAudioContext::SetWorkletMessagingProxy(
   }
 }

-AudioWorkletMessagingProxy* BaseAudioContext::WorkletMessagingProxy() {
-  DCHECK(worklet_messaging_proxy_);
-  return worklet_messaging_proxy_;
-}
-
 } // namespace blink
@@ -55,7 +55,7 @@ class AudioBuffer;
 class AudioBufferSourceNode;
 class AudioContextOptions;
 class AudioListener;
-class AudioWorkletMessagingProxy;
+class AudioWorklet;
 class BiquadFilterNode;
 class ChannelMergerNode;
 class ChannelSplitterNode;
@@ -334,9 +334,13 @@ class MODULES_EXPORT BaseAudioContext
   // gesture while the AudioContext requires a user gesture.
   void MaybeRecordStartAttempt();

-  void SetWorkletMessagingProxy(AudioWorkletMessagingProxy*);
-  AudioWorkletMessagingProxy* WorkletMessagingProxy();
-  bool HasWorkletMessagingProxy() const;
+  // AudioWorklet IDL
+  AudioWorklet* audioWorklet() const;
+
+  // Callback from AudioWorklet, invoked when the associated
+  // AudioWorkletGlobalScope is created and the worklet operation is ready after
+  // the first script evaluation.
+  void NotifyWorkletIsReady();

   // TODO(crbug.com/764396): Remove this when fixed.
   virtual void CountValueSetterConflict(bool does_conflict){};
@@ -518,8 +522,7 @@ class MODULES_EXPORT BaseAudioContext
   Optional<AutoplayStatus> autoplay_status_;
   AudioIOPosition output_position_;

-  bool has_worklet_messaging_proxy_ = false;
-  Member<AudioWorkletMessagingProxy> worklet_messaging_proxy_;
+  Member<AudioWorklet> audio_worklet_;
 };

 } // namespace blink
...
@@ -68,5 +68,7 @@ callback DecodeSuccessCallback = void (AudioBuffer decodedData);
     [RaisesException, MeasureAs=AudioContextCreateMediaStreamSource] MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
     [RaisesException, MeasureAs=AudioContextCreateMediaStreamDestination] MediaStreamAudioDestinationNode createMediaStreamDestination();

+    [RuntimeEnabled=AudioWorklet, SecureContext] readonly attribute AudioWorklet audioWorklet;
+
     attribute EventHandler onstatechange;
 };
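
Because the new attribute is gated on [RuntimeEnabled=AudioWorklet,
SecureContext], pages can feature-detect it. A minimal sketch (assumes a
secure context; the module name is illustrative):

  const context = new AudioContext();
  if (context.audioWorklet) {
    // AudioWorklet is exposed on this BaseAudioContext.
    context.audioWorklet.addModule('my-processor.js');
  }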
@@ -28,8 +28,8 @@
 #include "bindings/core/v8/ExceptionMessages.h"
 #include "bindings/core/v8/ExceptionState.h"
 #include "core/dom/ExceptionCode.h"
+#include "modules/webaudio/AudioWorklet.h"
 #include "modules/webaudio/BaseAudioContext.h"
-#include "modules/webaudio/AudioWorkletMessagingProxy.h"

 namespace blink {
@@ -89,13 +89,12 @@ void DefaultAudioDestinationHandler::CreateDestination() {
 void DefaultAudioDestinationHandler::StartDestination() {
   DCHECK(!destination_->IsPlaying());

-  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
-  // there is an active AudioWorkletGlobalScope.
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
-      Context()->HasWorkletMessagingProxy()) {
-    DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
+  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
+  // the worklet thread and the global scope are ready.
+  if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
     destination_->StartWithWorkletThread(
-        Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
+        Context()->audioWorklet()->GetBackingThread());
   } else {
     destination_->Start();
   }
...
@@ -28,8 +28,7 @@
 #include <algorithm>
 #include "modules/webaudio/AudioNodeInput.h"
 #include "modules/webaudio/AudioNodeOutput.h"
-#include "modules/webaudio/AudioWorkletMessagingProxy.h"
-#include "modules/webaudio/AudioWorkletThread.h"
+#include "modules/webaudio/AudioWorklet.h"
 #include "modules/webaudio/BaseAudioContext.h"
 #include "modules/webaudio/OfflineAudioContext.h"
 #include "platform/CrossThreadFunctional.h"
@@ -146,13 +145,10 @@ void OfflineAudioDestinationHandler::InitializeOfflineRenderThread(
     AudioBuffer* render_target) {
   DCHECK(IsMainThread());

-  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
-  // there is an active AudioWorkletGlobalScope.
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
-      Context()->HasWorkletMessagingProxy()) {
-    DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
-    worklet_backing_thread_ =
-        Context()->WorkletMessagingProxy()->GetWorkletBackingThread();
+  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
+  // the worklet thread and the global scope are ready.
+  if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
+    worklet_backing_thread_ = Context()->audioWorklet()->GetBackingThread();
   } else {
     render_thread_ =
         Platform::Current()->CreateThread("offline audio renderer");
@@ -365,10 +361,9 @@ bool OfflineAudioDestinationHandler::RenderIfNotSuspended(
 WebThread* OfflineAudioDestinationHandler::GetRenderingThread() {
   DCHECK(IsInitialized());

-  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
-  // there is an active AudioWorkletGlobalScope.
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
-      Context()->HasWorkletMessagingProxy()) {
+  // Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
+  // the worklet thread and the global scope are ready.
+  if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
     DCHECK(!render_thread_ && worklet_backing_thread_);
     return worklet_backing_thread_;
   }
@@ -381,12 +376,9 @@ void OfflineAudioDestinationHandler::RestartRendering() {
   // If the worklet thread is not assigned yet, that means the context has
   // started without a valid WorkletGlobalScope. Assign the worklet thread,
   // and it will be picked up when the GetRenderingThread() is called next.
-  if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
-      Context()->HasWorkletMessagingProxy() &&
+  if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady() &&
       !worklet_backing_thread_) {
-    DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
-    worklet_backing_thread_ =
-        Context()->WorkletMessagingProxy()->GetWorkletBackingThread();
+    worklet_backing_thread_ = Context()->audioWorklet()->GetBackingThread();
   }
 };
...
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "modules/webaudio/WindowAudioWorklet.h"

#include "core/dom/Document.h"
#include "core/frame/LocalDOMWindow.h"
#include "core/frame/LocalFrame.h"

namespace blink {

AudioWorklet* WindowAudioWorklet::audioWorklet(LocalDOMWindow& window) {
  if (!window.GetFrame())
    return nullptr;
  return From(window).audio_worklet_.Get();
}

// Break the following cycle when the context gets detached.
// Otherwise, the worklet object will leak.
//
// window => window.audioWorklet
// => WindowAudioWorklet
// => AudioWorklet <--- break this reference
// => ThreadedWorkletMessagingProxy
// => Document
// => ... => window
void WindowAudioWorklet::ContextDestroyed(ExecutionContext*) {
  audio_worklet_ = nullptr;
}

void WindowAudioWorklet::Trace(blink::Visitor* visitor) {
  visitor->Trace(audio_worklet_);
  Supplement<LocalDOMWindow>::Trace(visitor);
  ContextLifecycleObserver::Trace(visitor);
}

WindowAudioWorklet& WindowAudioWorklet::From(LocalDOMWindow& window) {
  WindowAudioWorklet* supplement = static_cast<WindowAudioWorklet*>(
      Supplement<LocalDOMWindow>::From(window, SupplementName()));
  if (!supplement) {
    supplement = new WindowAudioWorklet(window);
    ProvideTo(window, SupplementName(), supplement);
  }
  return *supplement;
}

WindowAudioWorklet::WindowAudioWorklet(LocalDOMWindow& window)
    : ContextLifecycleObserver(window.GetFrame()->GetDocument()),
      audio_worklet_(AudioWorklet::Create(window.GetFrame())) {
  DCHECK(GetExecutionContext());
}

const char* WindowAudioWorklet::SupplementName() {
  return "WindowAudioWorklet";
}

} // namespace blink
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef WindowAudioWorklet_h
#define WindowAudioWorklet_h

#include "core/dom/ContextLifecycleObserver.h"
#include "modules/ModulesExport.h"
#include "modules/webaudio/AudioWorklet.h"
#include "platform/Supplementable.h"
#include "platform/heap/Handle.h"

namespace blink {

class LocalDOMWindow;

class MODULES_EXPORT WindowAudioWorklet final
    : public GarbageCollected<WindowAudioWorklet>,
      public Supplement<LocalDOMWindow>,
      public ContextLifecycleObserver {
  USING_GARBAGE_COLLECTED_MIXIN(WindowAudioWorklet);

 public:
  static AudioWorklet* audioWorklet(LocalDOMWindow&);

  void ContextDestroyed(ExecutionContext*) override;

  void Trace(blink::Visitor*);

 private:
  static WindowAudioWorklet& From(LocalDOMWindow&);
  explicit WindowAudioWorklet(LocalDOMWindow&);
  static const char* SupplementName();

  Member<AudioWorklet> audio_worklet_;
};

} // namespace blink

#endif  // WindowAudioWorklet_h