Commit 249e75ed authored by Hongchan Choi's avatar Hongchan Choi Committed by Commit Bot

Move audioWorklet under BaseAudioContext

Spec PR (approved): https://github.com/WebAudio/web-audio-api/pull/1445

Based on the spec change, this CL moves audioWorklet from `window`
to `BaseAudioContext`. This alters the relationship between
BaseAudioContext, AudioWorkletMessagingProxy and AudioWorkletGlobalScope:

- Now AWMP/AWGS cannot exist without BaseAudioContext; this reduces
  the code complexity by removing checks.
- With this clear ownership, BaseAudioContext only needs to talk to
  AudioWorklet. AudioWorkletMessagingProxy can be hidden.
- When the script evaluation is completed, AWGS will notify the
  associated BaseAudioContext via AWObjectProxy - AWMessagingProxy.

Bug: 786542
Change-Id: I7b55fcacf304a4a25b527bcbcd285bef80881b8d
Reviewed-on: https://chromium-review.googlesource.com/779862
Commit-Queue: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: default avatarRaymond Toy <rtoy@chromium.org>
Reviewed-by: default avatarKentaro Hara <haraken@chromium.org>
Reviewed-by: default avatarHiroki Nakagawa <nhiroki@chromium.org>
Cr-Commit-Position: refs/heads/master@{#521078}
parent fe1dd778
/**
* Assert AudioWorklet is enabled.
*
* The content_shell driven by run-webkit-tests.py is supposed to enable all the
* experimental web platform features.
*
* We also want to run the test on the browser. So we check both cases for
* the content shell and the browser.
*/
// Check if AudioWorklet is available before running a test.
function assertAudioWorklet() {
let offlineContext = new OfflineAudioContext(1, 1, 44100);
// We want to be able to run tests both on the browser and the content shell.
// So check if AudioWorklet runtime flag is enabled, or check the context
// has AudioWorklet object.
if ((Boolean(window.internals) &&
Boolean(window.internals.runtimeFlags.audioWorkletEnabled)) ||
(Boolean(window.Worklet) && Boolean(window.audioWorklet))) {
offlineContext.audioWorklet instanceof AudioWorklet) {
return;
}
......
......@@ -21,8 +21,8 @@
// Sets up AudioWorklet and OfflineAudioContext.
audit.define('Initializing AudioWorklet and Context', (task, should) => {
audioWorklet.addModule('gain-processor.js').then(() => {
context = new OfflineAudioContext(1, renderLength, sampleRate);
context = new OfflineAudioContext(1, renderLength, sampleRate);
context.audioWorklet.addModule('gain-processor.js').then(() => {
task.done();
});
});
......
......@@ -36,17 +36,19 @@
audit.define(
{label: 'construction-after-module-loading'},
(task, should) => {
audioWorklet.addModule('dummy-processor.js').then(() => {
let dummyWorkletNode =
new AudioWorkletNode(realtimeContext, 'dummy');
should(dummyWorkletNode instanceof AudioWorkletNode,
'"dummyWorkletNode" is an instance of AudioWorkletNode')
.beTrue();
should(() => new AudioWorkletNode(realtimeContext, 'foobar'),
'Unregistered name "foobar" must throw an exception.')
.throw();
task.done();
});
realtimeContext.audioWorklet.addModule('dummy-processor.js')
.then(() => {
let dummyWorkletNode =
new AudioWorkletNode(realtimeContext, 'dummy');
should(dummyWorkletNode instanceof AudioWorkletNode,
'"dummyWorkletNode" is an instance of '
+ 'AudioWorkletNode')
.beTrue();
should(() => new AudioWorkletNode(realtimeContext, 'foobar'),
'Unregistered name "foobar" must throw an exception.')
.throw();
task.done();
});
});
audit.run();
......
......@@ -21,8 +21,8 @@
// Load script file and create a OfflineAudiocontext.
audit.define('setup', (task, should) => {
audioWorklet.addModule('dummy-processor.js').then(() => {
context = new OfflineAudioContext(1, 1, sampleRate);
context = new OfflineAudioContext(1, 1, sampleRate);
context.audioWorklet.addModule('dummy-processor.js').then(() => {
task.done();
});
});
......
......@@ -23,59 +23,63 @@
audit.define('pending-running-stopped',
(task, should) => {
let context = new OfflineAudioContext(1, renderLength, sampleRate);
let timedWorkletNode = new AudioWorkletNode(context, 'timed');
context.audioWorklet.addModule('state-processor.js').then(() => {
let timedWorkletNode = new AudioWorkletNode(context, 'timed');
// The construction of associated processor has not been
// completed. In this state, no audio processing can happen and
// all messages to the processor will be queued.
should(timedWorkletNode.processorState,
'Checking the processor state upon the constructor call')
.beEqualTo('pending');
// The construction of associated processor has not been
// completed. In this state, no audio processing can happen and
// all messages to the processor will be queued.
should(timedWorkletNode.processorState,
'Checking the processor state upon the constructor call')
.beEqualTo('pending');
timedWorkletNode.connect(context.destination);
timedWorkletNode.connect(context.destination);
// Checks the handler of |onprocessorstatechange|. Because the
// processor script is correct, the |running| state change MUST
// be fired.
let isFirstPhase = true;
timedWorkletNode.onprocessorstatechange = () => {
// The first phase should be "running".
if (isFirstPhase) {
should(timedWorkletNode.processorState,
'Checking the processor state upon ' +
'processorstatechange event')
.beEqualTo('running');
isFirstPhase = false;
} else {
// The second phase in this test must be "stopped".
should(timedWorkletNode.processorState,
'Checking the processor state after ' +
'processor stopped processing')
.beEqualTo('stopped');
task.done();
}
};
// Checks the handler of |onprocessorstatechange|. Because the
// processor script is correct, the |running| state change MUST
// be fired.
let isFirstPhase = true;
timedWorkletNode.onprocessorstatechange = () => {
// The first phase should be "running".
if (isFirstPhase) {
should(timedWorkletNode.processorState,
'Checking the processor state upon ' +
'processorstatechange event')
.beEqualTo('running');
isFirstPhase = false;
} else {
// The second phase in this test must be "stopped".
should(timedWorkletNode.processorState,
'Checking the processor state after ' +
'processor stopped processing')
.beEqualTo('stopped');
task.done();
}
};
context.startRendering();
context.startRendering();
});
});
// Test the error state caused by the failure of processor constructor.
audit.define('constructor-error',
(task, should) => {
let context = new OfflineAudioContext(1, renderLength, sampleRate);
let constructorErrorWorkletNode =
new AudioWorkletNode(context, 'constructor-error');
should(constructorErrorWorkletNode.processorState,
'constructorErrorWorkletNode.processorState after ' +
'its construction')
.beEqualTo('pending');
constructorErrorWorkletNode.onprocessorstatechange = () => {
context.audioWorklet.addModule('state-processor.js').then(() => {
let constructorErrorWorkletNode =
new AudioWorkletNode(context, 'constructor-error');
should(constructorErrorWorkletNode.processorState,
'workletNode.processorState upon processorstatechange ' +
'event after the failure from processor.constructor()')
.beEqualTo('error');
task.done();
};
'constructorErrorWorkletNode.processorState after ' +
'its construction')
.beEqualTo('pending');
constructorErrorWorkletNode.onprocessorstatechange = () => {
should(constructorErrorWorkletNode.processorState,
'workletNode.processorState upon processorstatechange ' +
'event after the failure from processor.constructor()')
.beEqualTo('error');
task.done();
};
});
});
// Test the error state caused by the failure of processor's process()
......@@ -83,35 +87,35 @@
audit.define('process-error',
(task, should) => {
let context = new OfflineAudioContext(1, renderLength, sampleRate);
let processErrorWorkletNode =
new AudioWorkletNode(context, 'process-error');
should(processErrorWorkletNode.processorState,
'processErrorWorkletNode.processorState after ' +
'its construction')
.beEqualTo('pending');
context.audioWorklet.addModule('state-processor.js').then(() => {
let processErrorWorkletNode =
new AudioWorkletNode(context, 'process-error');
should(processErrorWorkletNode.processorState,
'processErrorWorkletNode.processorState after ' +
'its construction')
.beEqualTo('pending');
processErrorWorkletNode.connect(context.destination);
processErrorWorkletNode.connect(context.destination);
let isFirstPhase = true;
processErrorWorkletNode.onprocessorstatechange = () => {
if (isFirstPhase) {
// Ignore the first state change event, which is "running";
isFirstPhase = false;
} else {
should(processErrorWorkletNode.processorState,
'workletNode.processorState upon processorstatechange ' +
'event after the failure from processor.process()')
.beEqualTo('error');
task.done();
}
};
let isFirstPhase = true;
processErrorWorkletNode.onprocessorstatechange = () => {
if (isFirstPhase) {
// Ignore the first state change event, which is "running";
isFirstPhase = false;
} else {
should(processErrorWorkletNode.processorState,
'workletNode.processorState upon processorstatechange ' +
'event after the failure from processor.process()')
.beEqualTo('error');
task.done();
}
};
context.startRendering();
context.startRendering();
});
});
audioWorklet.addModule('state-processor.js').then(() => {
audit.run();
});
audit.run();
</script>
</body>
</html>
......@@ -2,7 +2,7 @@
<html>
<head>
<title>
Checking window.audioWorklet
Checking BaseAudioContext.audioWorklet
</title>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
......@@ -16,30 +16,18 @@
let audit = Audit.createTaskRunner();
let realtimeContext = new AudioContext();
let offlineContext = new OfflineAudioContext(1, 1, 44100);
// Test if AudioWorklet exists.
audit.define('Test if AudioWorklet exists', (task, should) => {
should(window.audioWorklet instanceof Worklet,
'window.audioWorklet is an instance of Worklet')
should(realtimeContext.audioWorklet instanceof AudioWorklet &&
offlineContext.audioWorklet instanceof AudioWorklet,
'BaseAudioContext.audioWorklet is an instance of AudioWorklet')
.beTrue();
task.done();
});
// Test the construction of BaseAudioContext before |worklet.addModule()|.
audit.define(
'Test invocation of addModule() after BaseAudioContext construction',
(task, should) => {
should(
() => {
let context = new AudioContext();
audioWorklet.addModule('bypass-processor.js');
},
'Calling audioWorklet.addModule() before construction of ' +
'BaseAudioContext')
.notThrow();
task.done();
});
audit.run();
</script>
</body>
......
......@@ -36,7 +36,7 @@
context.channeCountMode = 'explicit';
context.channelInterpretation = 'discrete';
audioWorklet.addModule('gain-processor.js').then(() => {
context.audioWorklet.addModule('gain-processor.js').then(() => {
let testBuffer = createConstantBuffer(context, 1, testChannelValues);
let sourceNode = new AudioBufferSourceNode(context);
let gainWorkletNode = new AudioWorkletNode(context, 'gain');
......
......@@ -60,7 +60,7 @@
porterWorkletNode.port.postMessage('hello');
});
audioWorklet.addModule('port-processor.js').then(() => {
context.audioWorklet.addModule('port-processor.js').then(() => {
audit.run();
});
</script>
......
......@@ -50,9 +50,12 @@
task.done();
});
audioWorklet.addModule('dummy-processor.js').then(() => {
audit.run();
});
Promise.all([
realtimeContext.audioWorklet.addModule('dummy-processor.js'),
offlineContext.audioWorklet.addModule('dummy-processor.js')
]).then(() => {
audit.run();
});
</script>
</body>
</html>
......@@ -7,14 +7,18 @@
<script src="/resources/get-host-info.js"></script>
<script>
// This test should not be upstreamed to WPT because Worklets are now restrected
// This test should not be upstreamed to WPT because Worklets are now restricted
// to secure contexts by Chrome's policy.
const realTimeContext = new AudioContext();
const offlineContext = new OfflineAudioContext(1, 1, 44100);
if (window.location.origin != get_host_info().UNAUTHENTICATED_ORIGIN) {
test(t => {
assert_not_equals(undefined, CSS.paintWorklet);
assert_not_equals(undefined, window.animationWorklet);
assert_not_equals(undefined, window.audioWorklet);
assert_not_equals(undefined, realTimeContext.audioWorklet);
assert_not_equals(undefined, offlineContext.audioWorklet);
}, 'Worklets should be available on a secure context.');
window.location = get_host_info().UNAUTHENTICATED_ORIGIN +
window.location.pathname;
......@@ -22,7 +26,8 @@ if (window.location.origin != get_host_info().UNAUTHENTICATED_ORIGIN) {
test(t => {
assert_equals(undefined, CSS.paintWorklet);
assert_equals(undefined, window.animationWorklet);
assert_equals(undefined, window.audioWorklet);
assert_equals(undefined, realTimeContext.audioWorklet);
assert_equals(undefined, offlineContext.audioWorklet);
}, 'Worklets should not be available on an insecure context.');
}
......
......@@ -20,36 +20,37 @@
description: 'Generate warnings if outside nominal range'
},
(task, should) => {
window.audioWorklet.addModule(NoiseGenWorkletUrl).then(() => {
// Any reasonable sample rate will work
let sampleRate = 16000;
let renderTime = 1;
let renderLength = renderTime * sampleRate;
let context =
new OfflineAudioContext(1, renderLength, sampleRate);
let noiseGenerator =
new AudioWorkletNode(context, 'noise-generator');
noiseGenerator.connect(context.destination);
let param = noiseGenerator.parameters.get('amplitude');
// Set the value inside the nominal range; no warning should be
// generated.
param.value = .1;
// Set the value outside the nominal range to generate a
// warning.
param.value = 99;
// Any reasonable sample rate will work
let sampleRate = 16000;
let renderTime = 1;
let renderLength = renderTime * sampleRate;
let context =
new OfflineAudioContext(1, renderLength, sampleRate);
context.audioWorklet.addModule(NoiseGenWorkletUrl).then(() => {
let noiseGenerator =
new AudioWorkletNode(context, 'noise-generator');
noiseGenerator.connect(context.destination);
let param = noiseGenerator.parameters.get('amplitude');
// Set the value inside the nominal range; no warning should be
// generated.
param.value = .1;
// Set the value outside the nominal range to generate a
// warning.
param.value = 99;
// Set up automation outside the nominal range to generate a
// warning.
param.setValueAtTime(-1, renderTime / 4);
param.linearRampToValueAtTime(5, renderTime);
// Set up automation outside the nominal range to generate a
// warning.
param.setValueAtTime(-1, renderTime / 4);
param.linearRampToValueAtTime(5, renderTime);
// Render; we don't care what the generated result is.
context.startRendering()
.then(() => {
should(true, 'Rendering succeeded').beTrue();
})
.then(() => task.done());
})});
// Render; we don't care what the generated result is.
context.startRendering()
.then(() => {
should(true, 'Rendering succeeded').beTrue();
})
.then(() => task.done());
});
});
audit.run();
</script>
......
......@@ -7,6 +7,7 @@
let BaseAudioContextOwnProperties = [
'audioWorklet',
'constructor',
'createAnalyser',
'createBiquadFilter',
......
......@@ -402,6 +402,9 @@ interface AudioTrackList : EventTarget
setter onaddtrack
setter onchange
setter onremovetrack
interface AudioWorklet : Worklet
attribute @@toStringTag
method constructor
interface AudioWorkletNode : AudioNode
attribute @@toStringTag
method constructor
......@@ -451,6 +454,7 @@ interface BarcodeDetector
method detect
interface BaseAudioContext : EventTarget
attribute @@toStringTag
getter audioWorklet
getter currentTime
getter destination
getter listener
......@@ -9154,7 +9158,6 @@ interface webkitURL
attribute window
getter animationWorklet
getter applicationCache
getter audioWorklet
getter caches
getter clientInformation
getter cookieStore
......
......@@ -329,6 +329,7 @@ modules_idl_files =
"webaudio/AudioParamMap.idl",
"webaudio/AudioProcessingEvent.idl",
"webaudio/AudioScheduledSourceNode.idl",
"webaudio/AudioWorklet.idl",
"webaudio/AudioWorkletGlobalScope.idl",
"webaudio/AudioWorkletProcessor.idl",
"webaudio/AudioWorkletNode.idl",
......@@ -722,7 +723,6 @@ modules_dependency_idl_files =
"vibration/NavigatorVibration.idl",
"vr/NavigatorVR.idl",
"wake_lock/ScreenWakeLock.idl",
"webaudio/WindowAudioWorklet.idl",
"webdatabase/WindowWebDatabase.idl",
"webgl/WebGL2RenderingContextBase.idl",
"webgl/WebGLRenderingContextBase.idl",
......
......@@ -6,77 +6,90 @@
#include "bindings/core/v8/V8BindingForCore.h"
#include "core/dom/Document.h"
#include "core/frame/LocalDOMWindow.h"
#include "core/frame/LocalFrame.h"
#include "core/workers/WorkerClients.h"
#include "modules/webaudio/AudioWorkletMessagingProxy.h"
#include "modules/webaudio/BaseAudioContext.h"
#include "modules/webaudio/CrossThreadAudioWorkletProcessorInfo.h"
namespace blink {
AudioWorklet* AudioWorklet::Create(LocalFrame* frame) {
return new AudioWorklet(frame);
AudioWorklet* AudioWorklet::Create(BaseAudioContext* context) {
return RuntimeEnabledFeatures::AudioWorkletEnabled()
? new AudioWorklet(context)
: nullptr;
}
AudioWorklet::AudioWorklet(LocalFrame* frame) : Worklet(frame) {}
AudioWorklet::AudioWorklet(BaseAudioContext* context)
: Worklet(context->GetExecutionContext()->ExecutingWindow()->GetFrame()),
context_(context) {}
AudioWorklet::~AudioWorklet() {
contexts_.clear();
// Delegates AudioWorkletProcessor creation for |handler| to the messaging
// proxy, handing over |message_port_channel| for the node/processor
// MessagePort pair. Main thread only; the messaging proxy must already exist
// (i.e. a global scope has been created — see GetMessagingProxy()).
void AudioWorklet::CreateProcessor(AudioWorkletHandler* handler,
MessagePortChannel message_port_channel) {
DCHECK(IsMainThread());
DCHECK(GetMessagingProxy());
GetMessagingProxy()->CreateProcessor(handler,
std::move(message_port_channel));
}
void AudioWorklet::RegisterContext(BaseAudioContext* context) {
DCHECK(!contexts_.Contains(context));
contexts_.insert(context);
void AudioWorklet::NotifyGlobalScopeIsUpdated() {
DCHECK(IsMainThread());
// Check if AudioWorklet loads the script and has an active
// AudioWorkletGlobalScope before getting the messaging proxy.
if (IsWorkletMessagingProxyCreated())
context->SetWorkletMessagingProxy(FindAvailableMessagingProxy());
if (!worklet_started_) {
context_->NotifyWorkletIsReady();
worklet_started_ = true;
}
}
void AudioWorklet::UnregisterContext(BaseAudioContext* context) {
// This may be called multiple times from BaseAudioContext.
if (!contexts_.Contains(context))
return;
// Returns the worklet's backing thread, obtained from the messaging proxy.
// Main thread only; DCHECKs that the messaging proxy exists, so callers must
// first ensure a global scope has been created (see IsReady()).
WebThread* AudioWorklet::GetBackingThread() {
DCHECK(IsMainThread());
DCHECK(GetMessagingProxy());
return GetMessagingProxy()->GetWorkletBackingThread();
}
contexts_.erase(context);
// Returns the cross-thread AudioParam descriptors registered for the
// processor named |name|, as collected by the messaging proxy after script
// evaluation. Main thread only; requires an existing messaging proxy.
const Vector<CrossThreadAudioParamInfo>
AudioWorklet::GetParamInfoListForProcessor(
const String& name) {
DCHECK(IsMainThread());
DCHECK(GetMessagingProxy());
return GetMessagingProxy()->GetParamInfoListForProcessor(name);
}
AudioWorkletMessagingProxy* AudioWorklet::FindAvailableMessagingProxy() {
return static_cast<AudioWorkletMessagingProxy*>(FindAvailableGlobalScope());
// True if a processor named |name| has been registered in the global scope
// (per the messaging proxy's bookkeeping). Main thread only; requires an
// existing messaging proxy.
bool AudioWorklet::IsProcessorRegistered(const String& name) {
DCHECK(IsMainThread());
DCHECK(GetMessagingProxy());
return GetMessagingProxy()->IsProcessorRegistered(name);
}
bool AudioWorklet::IsWorkletMessagingProxyCreated() const {
return GetNumberOfGlobalScopes() > 0;
// True when the worklet is operational: a messaging proxy exists AND it has
// a backing thread. The && short-circuit matters — GetBackingThread() DCHECKs
// the messaging proxy, so it is only reached when the proxy is non-null.
bool AudioWorklet::IsReady() {
DCHECK(IsMainThread());
return GetMessagingProxy() && GetBackingThread();
}
// Worklet override: a new global scope is needed only if none exists yet,
// i.e. this AudioWorklet runs at most one AudioWorkletGlobalScope.
bool AudioWorklet::NeedsToCreateGlobalScope() {
// TODO(hongchan): support multiple WorkletGlobalScopes, one scope per a
// BaseAudioContext. In order to do it, FindAvailableGlobalScope() needs to
// be inherited and rewritten.
return GetNumberOfGlobalScopes() == 0;
}
WorkletGlobalScopeProxy* AudioWorklet::CreateGlobalScope() {
DCHECK(NeedsToCreateGlobalScope());
WorkerClients* worker_clients = WorkerClients::Create();
AudioWorkletMessagingProxy* proxy =
new AudioWorkletMessagingProxy(GetExecutionContext(), worker_clients);
new AudioWorkletMessagingProxy(GetExecutionContext(),
WorkerClients::Create(),
this);
proxy->Initialize();
for (BaseAudioContext* context : contexts_) {
// TODO(hongchan): Currently all BaseAudioContexts shares a single
// AudioWorkletMessagingProxy. Fix this to support one messaging proxy for
// each BaseAudioContext.
if (!context->HasWorkletMessagingProxy())
context->SetWorkletMessagingProxy(proxy);
}
return proxy;
}
// Returns the AudioWorkletMessagingProxy for the (single) global scope, or
// nullptr when no global scope has been created yet. The static_cast is safe
// because CreateGlobalScope() only ever installs AudioWorkletMessagingProxy
// instances as this worklet's global-scope proxies.
AudioWorkletMessagingProxy* AudioWorklet::GetMessagingProxy() {
return NeedsToCreateGlobalScope()
? nullptr
: static_cast<AudioWorkletMessagingProxy*>(FindAvailableGlobalScope());
}
void AudioWorklet::Trace(blink::Visitor* visitor) {
visitor->Trace(contexts_);
visitor->Trace(context_);
Worklet::Trace(visitor);
}
......
......@@ -11,36 +11,59 @@
namespace blink {
class AudioWorkletHandler;
class AudioWorkletMessagingProxy;
class BaseAudioContext;
class LocalFrame;
class CrossThreadAudioParamInfo;
class MessagePortChannel;
class MODULES_EXPORT AudioWorklet final : public Worklet {
DEFINE_WRAPPERTYPEINFO();
USING_GARBAGE_COLLECTED_MIXIN(AudioWorklet);
WTF_MAKE_NONCOPYABLE(AudioWorklet);
public:
static AudioWorklet* Create(LocalFrame*);
~AudioWorklet() override;
// When the AudioWorklet runtime flag is not enabled, this constructor returns
// |nullptr|.
static AudioWorklet* Create(BaseAudioContext*);
void RegisterContext(BaseAudioContext*);
void UnregisterContext(BaseAudioContext*);
~AudioWorklet() = default;
AudioWorkletMessagingProxy* FindAvailableMessagingProxy();
void CreateProcessor(AudioWorkletHandler*, MessagePortChannel);
virtual void Trace(blink::Visitor*);
// Invoked by AudioWorkletMessagingProxy. Notifies |context_| when
// AudioWorkletGlobalScope finishes the first script evaluation and is ready
// for the worklet operation. Can be used for other post-evaluation tasks
// in AudioWorklet or BaseAudioContext.
void NotifyGlobalScopeIsUpdated();
WebThread* GetBackingThread();
const Vector<CrossThreadAudioParamInfo> GetParamInfoListForProcessor(
const String& name);
bool IsProcessorRegistered(const String& name);
// Returns |true| when a AudioWorkletMessagingProxy and a WorkletBackingThread
// are ready.
bool IsReady();
void Trace(blink::Visitor*) override;
private:
explicit AudioWorklet(LocalFrame*);
explicit AudioWorklet(BaseAudioContext*);
// Implements Worklet.
// Implements Worklet
bool NeedsToCreateGlobalScope() final;
WorkletGlobalScopeProxy* CreateGlobalScope() final;
bool IsWorkletMessagingProxyCreated() const;
// Returns |nullptr| if there is no active WorkletGlobalScope().
AudioWorkletMessagingProxy* GetMessagingProxy();
// To catch the first global scope update and notify the context.
bool worklet_started_ = false;
// AudioWorklet keeps the reference of all active BaseAudioContexts, so it
// can notify the contexts when a script is loaded in AudioWorkletGlobalScope.
HeapHashSet<Member<BaseAudioContext>> contexts_;
Member<BaseAudioContext> context_;
};
} // namespace blink
......
......@@ -5,9 +5,7 @@
// https://webaudio.github.io/web-audio-api/#AudioWorklet
[
ImplementedAs=WindowAudioWorklet,
RuntimeEnabled=AudioWorklet,
SecureContext
] partial interface Window {
readonly attribute Worklet audioWorklet;
] interface AudioWorklet : Worklet {
};
......@@ -5,6 +5,7 @@
#include "modules/webaudio/AudioWorkletMessagingProxy.h"
#include "core/dom/MessagePort.h"
#include "modules/webaudio/AudioWorklet.h"
#include "modules/webaudio/AudioWorkletGlobalScope.h"
#include "modules/webaudio/AudioWorkletNode.h"
#include "modules/webaudio/AudioWorkletObjectProxy.h"
......@@ -17,10 +18,10 @@ namespace blink {
AudioWorkletMessagingProxy::AudioWorkletMessagingProxy(
ExecutionContext* execution_context,
WorkerClients* worker_clients)
: ThreadedWorkletMessagingProxy(execution_context, worker_clients) {}
AudioWorkletMessagingProxy::~AudioWorkletMessagingProxy() {}
WorkerClients* worker_clients,
AudioWorklet* worklet)
: ThreadedWorkletMessagingProxy(execution_context, worker_clients),
worklet_(worklet) {}
void AudioWorkletMessagingProxy::CreateProcessor(
AudioWorkletHandler* handler,
......@@ -61,6 +62,10 @@ void AudioWorkletMessagingProxy::SynchronizeWorkletProcessorInfoList(
processor_info_map_.insert(processor_info.Name(),
processor_info.ParamInfoList());
}
// Notify AudioWorklet object that the global scope has been updated after the
// script evaluation.
worklet_->NotifyGlobalScopeIsUpdated();
}
bool AudioWorkletMessagingProxy::IsProcessorRegistered(
......@@ -94,4 +99,10 @@ std::unique_ptr<WorkerThread> AudioWorkletMessagingProxy::CreateWorkerThread() {
WorkletObjectProxy());
}
// Oilpan tracing: marks the back-reference to the owning AudioWorklet, then
// defers to the base class for the rest of the proxy's traced members.
void AudioWorkletMessagingProxy::Trace(Visitor* visitor) {
visitor->Trace(worklet_);
ThreadedWorkletMessagingProxy::Trace(visitor);
}
} // namespace blink
......@@ -10,6 +10,7 @@
namespace blink {
class AudioWorklet;
class AudioWorkletHandler;
class CrossThreadAudioParamInfo;
class CrossThreadAudioWorkletProcessorInfo;
......@@ -23,7 +24,7 @@ class WorkerThread;
// scope via AudioWorkletObjectProxy.
class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
public:
AudioWorkletMessagingProxy(ExecutionContext*, WorkerClients*);
AudioWorkletMessagingProxy(ExecutionContext*, WorkerClients*, AudioWorklet*);
// Since the creation of AudioWorkletProcessor needs to be done in the
// different thread, this method is a wrapper for cross-thread task posting.
......@@ -54,9 +55,9 @@ class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
WebThread* GetWorkletBackingThread();
private:
~AudioWorkletMessagingProxy() override;
void Trace(Visitor*);
private:
// Implements ThreadedWorkletMessagingProxy.
std::unique_ptr<ThreadedWorkletObjectProxy> CreateObjectProxy(
ThreadedWorkletMessagingProxy*,
......@@ -66,6 +67,8 @@ class AudioWorkletMessagingProxy final : public ThreadedWorkletMessagingProxy {
// Each entry consists of processor name and associated AudioParam list.
HashMap<String, Vector<CrossThreadAudioParamInfo>> processor_info_map_;
Member<AudioWorklet> worklet_;
};
} // namespace blink
......
......@@ -11,8 +11,7 @@
#include "modules/webaudio/AudioNodeInput.h"
#include "modules/webaudio/AudioNodeOutput.h"
#include "modules/webaudio/AudioParamDescriptor.h"
#include "modules/webaudio/AudioWorkletGlobalScope.h"
#include "modules/webaudio/AudioWorkletMessagingProxy.h"
#include "modules/webaudio/AudioWorklet.h"
#include "modules/webaudio/AudioWorkletProcessor.h"
#include "modules/webaudio/AudioWorkletProcessorDefinition.h"
#include "modules/webaudio/CrossThreadAudioWorkletProcessorInfo.h"
......@@ -277,7 +276,7 @@ AudioWorkletNode* AudioWorkletNode::Create(
}
}
if (!context->HasWorkletMessagingProxy()) {
if (!context->audioWorklet()->IsReady()) {
exception_state.ThrowDOMException(
kInvalidStateError,
"AudioWorkletNode cannot be created: AudioWorklet does not have a "
......@@ -286,9 +285,7 @@ AudioWorkletNode* AudioWorkletNode::Create(
return nullptr;
}
AudioWorkletMessagingProxy* proxy = context->WorkletMessagingProxy();
if (!proxy->IsProcessorRegistered(name)) {
if (!context->audioWorklet()->IsProcessorRegistered(name)) {
exception_state.ThrowDOMException(
kInvalidStateError,
"AudioWorkletNode cannot be created: The node name '" + name +
......@@ -302,8 +299,8 @@ AudioWorkletNode* AudioWorkletNode::Create(
AudioWorkletNode* node =
new AudioWorkletNode(*context, name, options,
proxy->GetParamInfoListForProcessor(name),
channel->port1());
context->audioWorklet()->GetParamInfoListForProcessor(name),
channel->port1());
if (!node) {
exception_state.ThrowDOMException(
......@@ -319,8 +316,8 @@ AudioWorkletNode* AudioWorkletNode::Create(
// This is non-blocking async call. |node| still can be returned to user
// before the scheduled async task is completed.
proxy->CreateProcessor(&node->GetWorkletHandler(),
std::move(processor_port_channel));
context->audioWorklet()->CreateProcessor(&node->GetWorkletHandler(),
std::move(processor_port_channel));
return node;
}
......
......@@ -125,19 +125,9 @@ blink_modules_sources("webaudio") {
"WaveShaperNode.h",
"WaveShaperProcessor.cpp",
"WaveShaperProcessor.h",
"WindowAudioWorklet.cpp",
"WindowAudioWorklet.h",
]
if (is_win) {
jumbo_excluded_sources = [
# Uses Supplement<LocalDOMWindow> with MODULES_EXPORT while
# other files use Supplement<LocalDOMWindow> with
# CORE_EXPORT. Mixing those in the same compilation unit
# triggers link errors in Windows. https://crbug.com/739340
"WindowAudioWorklet.cpp",
]
# Result of 32-bit shift implicitly converted to 64 bits.
cflags = [ "/wd4334" ]
}
......
......@@ -71,7 +71,6 @@
#include "modules/webaudio/ScriptProcessorNode.h"
#include "modules/webaudio/StereoPannerNode.h"
#include "modules/webaudio/WaveShaperNode.h"
#include "modules/webaudio/WindowAudioWorklet.h"
#include "platform/CrossThreadFunctional.h"
#include "platform/Histogram.h"
#include "platform/audio/IIRFilter.h"
......@@ -137,9 +136,6 @@ BaseAudioContext::BaseAudioContext(Document* document,
BaseAudioContext::~BaseAudioContext() {
GetDeferredTaskHandler().ContextWillBeDestroyed();
// AudioNodes keep a reference to their context, so there should be no way to
// be in the destructor if there are still AudioNodes around.
DCHECK(!IsDestinationInitialized());
DCHECK(!active_source_nodes_.size());
DCHECK(!is_resolving_resume_promises_);
DCHECK(!resume_resolvers_.size());
......@@ -152,21 +148,16 @@ void BaseAudioContext::Initialize() {
FFTFrame::Initialize();
if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
audio_worklet_ = AudioWorklet::Create(this);
}
if (destination_node_) {
destination_node_->Handler().Initialize();
// The AudioParams in the listener need access to the destination node, so
// only create the listener if the destination node exists.
listener_ = AudioListener::Create(*this);
}
// Check if a document or a frame supports AudioWorklet. If not, AudioWorklet
// cannot be accessed.
if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
AudioWorklet* audioWorklet = WindowAudioWorklet::audioWorklet(
*GetExecutionContext()->ExecutingWindow());
if (audioWorklet)
audioWorklet->RegisterContext(this);
}
}
void BaseAudioContext::Clear() {
......@@ -180,17 +171,6 @@ void BaseAudioContext::Clear() {
void BaseAudioContext::Uninitialize() {
DCHECK(IsMainThread());
// AudioWorklet may be destroyed before the context goes away. So we have to
// check the pointer. See: crbug.com/503845
if (RuntimeEnabledFeatures::AudioWorkletEnabled()) {
AudioWorklet* audioWorklet = WindowAudioWorklet::audioWorklet(
*GetExecutionContext()->ExecutingWindow());
if (audioWorklet) {
audioWorklet->UnregisterContext(this);
worklet_messaging_proxy_.Clear();
}
}
if (!IsDestinationInitialized())
return;
......@@ -1012,7 +992,7 @@ void BaseAudioContext::Trace(blink::Visitor* visitor) {
visitor->Trace(periodic_wave_square_);
visitor->Trace(periodic_wave_sawtooth_);
visitor->Trace(periodic_wave_triangle_);
visitor->Trace(worklet_messaging_proxy_);
visitor->Trace(audio_worklet_);
EventTargetWithInlineData::Trace(visitor);
PausableObject::Trace(visitor);
}
......@@ -1038,15 +1018,12 @@ SecurityOrigin* BaseAudioContext::GetSecurityOrigin() const {
return nullptr;
}
bool BaseAudioContext::HasWorkletMessagingProxy() const {
return has_worklet_messaging_proxy_;
AudioWorklet* BaseAudioContext::audioWorklet() const {
return audio_worklet_.Get();
}
void BaseAudioContext::SetWorkletMessagingProxy(
AudioWorkletMessagingProxy* proxy) {
DCHECK(!worklet_messaging_proxy_);
worklet_messaging_proxy_ = proxy;
has_worklet_messaging_proxy_ = true;
void BaseAudioContext::NotifyWorkletIsReady() {
DCHECK(audioWorklet()->IsReady());
// If the context is running or suspended, restart the destination to switch
// the render thread with the worklet thread. Note that restarting can happen
......@@ -1056,9 +1033,4 @@ void BaseAudioContext::SetWorkletMessagingProxy(
}
}
AudioWorkletMessagingProxy* BaseAudioContext::WorkletMessagingProxy() {
DCHECK(worklet_messaging_proxy_);
return worklet_messaging_proxy_;
}
} // namespace blink
......@@ -55,7 +55,7 @@ class AudioBuffer;
class AudioBufferSourceNode;
class AudioContextOptions;
class AudioListener;
class AudioWorkletMessagingProxy;
class AudioWorklet;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
......@@ -334,9 +334,13 @@ class MODULES_EXPORT BaseAudioContext
// gesture while the AudioContext requires a user gesture.
void MaybeRecordStartAttempt();
void SetWorkletMessagingProxy(AudioWorkletMessagingProxy*);
AudioWorkletMessagingProxy* WorkletMessagingProxy();
bool HasWorkletMessagingProxy() const;
// AudioWorklet IDL
AudioWorklet* audioWorklet() const;
// Callback from AudioWorklet, invoked when the associated
// AudioWorkletGlobalScope is created and the worklet operation is ready after
// the first script evaluation.
void NotifyWorkletIsReady();
// TODO(crbug.com/764396): Remove this when fixed.
virtual void CountValueSetterConflict(bool does_conflict){};
......@@ -518,8 +522,7 @@ class MODULES_EXPORT BaseAudioContext
Optional<AutoplayStatus> autoplay_status_;
AudioIOPosition output_position_;
bool has_worklet_messaging_proxy_ = false;
Member<AudioWorkletMessagingProxy> worklet_messaging_proxy_;
Member<AudioWorklet> audio_worklet_;
};
} // namespace blink
......
......@@ -68,5 +68,7 @@ callback DecodeSuccessCallback = void (AudioBuffer decodedData);
[RaisesException, MeasureAs=AudioContextCreateMediaStreamSource] MediaStreamAudioSourceNode createMediaStreamSource(MediaStream mediaStream);
[RaisesException, MeasureAs=AudioContextCreateMediaStreamDestination] MediaStreamAudioDestinationNode createMediaStreamDestination();
[RuntimeEnabled=AudioWorklet, SecureContext] readonly attribute AudioWorklet audioWorklet;
attribute EventHandler onstatechange;
};
......@@ -28,8 +28,8 @@
#include "bindings/core/v8/ExceptionMessages.h"
#include "bindings/core/v8/ExceptionState.h"
#include "core/dom/ExceptionCode.h"
#include "modules/webaudio/AudioWorklet.h"
#include "modules/webaudio/BaseAudioContext.h"
#include "modules/webaudio/AudioWorkletMessagingProxy.h"
namespace blink {
......@@ -89,13 +89,12 @@ void DefaultAudioDestinationHandler::CreateDestination() {
void DefaultAudioDestinationHandler::StartDestination() {
DCHECK(!destination_->IsPlaying());
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
// there is an active AudioWorkletGlobalScope.
if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
Context()->HasWorkletMessagingProxy()) {
DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
// the worklet thread and the global scope are ready.
if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
destination_->StartWithWorkletThread(
Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
Context()->audioWorklet()->GetBackingThread());
} else {
destination_->Start();
}
......
......@@ -28,8 +28,7 @@
#include <algorithm>
#include "modules/webaudio/AudioNodeInput.h"
#include "modules/webaudio/AudioNodeOutput.h"
#include "modules/webaudio/AudioWorkletMessagingProxy.h"
#include "modules/webaudio/AudioWorkletThread.h"
#include "modules/webaudio/AudioWorklet.h"
#include "modules/webaudio/BaseAudioContext.h"
#include "modules/webaudio/OfflineAudioContext.h"
#include "platform/CrossThreadFunctional.h"
......@@ -146,13 +145,10 @@ void OfflineAudioDestinationHandler::InitializeOfflineRenderThread(
AudioBuffer* render_target) {
DCHECK(IsMainThread());
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
// there is an active AudioWorkletGlobalScope.
if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
Context()->HasWorkletMessagingProxy()) {
DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
worklet_backing_thread_ =
Context()->WorkletMessagingProxy()->GetWorkletBackingThread();
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
// the worklet thread and the global scope are ready.
if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
worklet_backing_thread_ = Context()->audioWorklet()->GetBackingThread();
} else {
render_thread_ =
Platform::Current()->CreateThread("offline audio renderer");
......@@ -365,10 +361,9 @@ bool OfflineAudioDestinationHandler::RenderIfNotSuspended(
WebThread* OfflineAudioDestinationHandler::GetRenderingThread() {
DCHECK(IsInitialized());
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled and
// there is an active AudioWorkletGlobalScope.
if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
Context()->HasWorkletMessagingProxy()) {
// Use Experimental AudioWorkletThread only when AudioWorklet is enabled, and
// the worklet thread and the global scope are ready.
if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady()) {
DCHECK(!render_thread_ && worklet_backing_thread_);
return worklet_backing_thread_;
}
......@@ -381,12 +376,9 @@ void OfflineAudioDestinationHandler::RestartRendering() {
// If the worklet thread is not assigned yet, that means the context has
// started without a valid WorkletGlobalScope. Assign the worklet thread,
// and it will be picked up when the GetRenderingThread() is called next.
if (RuntimeEnabledFeatures::AudioWorkletEnabled() &&
Context()->HasWorkletMessagingProxy() &&
if (Context()->audioWorklet() && Context()->audioWorklet()->IsReady() &&
!worklet_backing_thread_) {
DCHECK(Context()->WorkletMessagingProxy()->GetWorkletBackingThread());
worklet_backing_thread_ =
Context()->WorkletMessagingProxy()->GetWorkletBackingThread();
worklet_backing_thread_ = Context()->audioWorklet()->GetBackingThread();
}
};
......
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "modules/webaudio/WindowAudioWorklet.h"
#include "core/dom/Document.h"
#include "core/frame/LocalDOMWindow.h"
#include "core/frame/LocalFrame.h"
namespace blink {
// Window-level accessor backing window.audioWorklet. A detached window
// (one with no frame) has no worklet, so null is reported in that case;
// otherwise the lazily-created supplement's worklet is returned.
AudioWorklet* WindowAudioWorklet::audioWorklet(LocalDOMWindow& window) {
  if (window.GetFrame()) {
    return From(window).audio_worklet_.Get();
  }
  return nullptr;
}
// ContextLifecycleObserver callback, invoked when the observed execution
// context (the window's Document) is destroyed.
//
// Break the following reference cycle when the context gets detached;
// otherwise, the worklet object will leak:
//
// window => window.audioWorklet
// => WindowAudioWorklet
// => AudioWorklet <--- break this reference
// => ThreadedWorkletMessagingProxy
// => Document
// => ... => window
void WindowAudioWorklet::ContextDestroyed(ExecutionContext*) {
audio_worklet_ = nullptr;
}
// Oilpan GC tracing: visits the owned AudioWorklet member, then delegates
// to both mixin bases so their traced fields are visited as well.
void WindowAudioWorklet::Trace(blink::Visitor* visitor) {
visitor->Trace(audio_worklet_);
Supplement<LocalDOMWindow>::Trace(visitor);
ContextLifecycleObserver::Trace(visitor);
}
// Returns the WindowAudioWorklet supplement for |window|, creating and
// registering it on first access.
WindowAudioWorklet& WindowAudioWorklet::From(LocalDOMWindow& window) {
  auto* instance = static_cast<WindowAudioWorklet*>(
      Supplement<LocalDOMWindow>::From(window, SupplementName()));
  if (instance)
    return *instance;
  instance = new WindowAudioWorklet(window);
  ProvideTo(window, SupplementName(), instance);
  return *instance;
}
// Constructs the supplement: observes the window's Document lifecycle and
// eagerly creates the AudioWorklet for the window's frame. The frame must
// be non-null here — callers reach this only through From(), which is
// invoked after audioWorklet() has checked window.GetFrame().
WindowAudioWorklet::WindowAudioWorklet(LocalDOMWindow& window)
: ContextLifecycleObserver(window.GetFrame()->GetDocument()),
audio_worklet_(AudioWorklet::Create(window.GetFrame())) {
DCHECK(GetExecutionContext());
}
// Key used to register and look up this supplement on LocalDOMWindow.
const char* WindowAudioWorklet::SupplementName() {
return "WindowAudioWorklet";
}
} // namespace blink
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WindowAudioWorklet_h
#define WindowAudioWorklet_h
#include "core/dom/ContextLifecycleObserver.h"
#include "modules/ModulesExport.h"
#include "modules/webaudio/AudioWorklet.h"
#include "platform/Supplementable.h"
#include "platform/heap/Handle.h"
namespace blink {
class LocalDOMWindow;
// LocalDOMWindow supplement that exposes window.audioWorklet. Owns the
// per-window AudioWorklet and drops it when the window's execution context
// is destroyed, breaking the window <=> worklet reference cycle.
class MODULES_EXPORT WindowAudioWorklet final
: public GarbageCollected<WindowAudioWorklet>,
public Supplement<LocalDOMWindow>,
public ContextLifecycleObserver {
USING_GARBAGE_COLLECTED_MIXIN(WindowAudioWorklet);
public:
// IDL attribute implementation; returns null for a detached (frameless)
// window.
static AudioWorklet* audioWorklet(LocalDOMWindow&);
// ContextLifecycleObserver override: clears audio_worklet_ on detach.
void ContextDestroyed(ExecutionContext*) override;
void Trace(blink::Visitor*);
private:
// Lazily creates and registers the supplement on first access.
static WindowAudioWorklet& From(LocalDOMWindow&);
explicit WindowAudioWorklet(LocalDOMWindow&);
// Registration key for Supplement<LocalDOMWindow>.
static const char* SupplementName();
// The worklet exposed as window.audioWorklet; nulled in ContextDestroyed.
Member<AudioWorklet> audio_worklet_;
};
} // namespace blink
#endif // WindowAudioWorklet_h
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment