Commit 5cc93793 authored by Hongchan Choi; committed by Commit Bot

Add EventListenerBreakPoints for AudioContext operation

This CL adds EventListenerBreakPoints for the AudioContext constructor,
AudioContext.close(), AudioContext.resume() and AudioContext.suspend().

Bug: 848473
Change-Id: Ifbbccfd1e21dc8b5b444993c7143e869a56d7e6c
Reviewed-on: https://chromium-review.googlesource.com/1089200
Commit-Queue: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Raymond Toy <rtoy@chromium.org>
Reviewed-by: Andrey Kosyakov <caseq@chromium.org>
Reviewed-by: Aleksey Kozyatinskiy <kozyatinskiy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#568197}
parent b414bf17
Tests event listener breakpoints for WebAudio.
Script execution paused.
Successfully paused after AudioContext construction.
Script execution resumed.
Script execution paused.
Successfully paused after AudioContext suspension.
Script execution resumed.
Script execution paused.
Successfully paused after AudioContext resumption.
Script execution resumed.
Script execution paused.
Successfully paused after AudioContext closure.
Script execution resumed.
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(async function() {
  TestRunner.addResult(`Tests event listener breakpoints for WebAudio.\n`);
  await TestRunner.loadModule('sources_test_runner');
  await TestRunner.showPanel('sources');
  await TestRunner.evaluateInPagePromise(`
      let audioContext;
  `);

  // Each step arms one WebAudio instrumentation breakpoint, runs the
  // matching AudioContext operation in the page, and checks that the
  // debugger pauses. Entries: [breakpoint event name, label used in the
  // result message, page expression to evaluate].
  const steps = [
    ['audioContextCreated', 'construction', 'audioContext = new AudioContext()'],
    ['audioContextSuspended', 'suspension', 'audioContext.suspend()'],
    ['audioContextResumed', 'resumption', 'audioContext.resume()'],
    ['audioContextClosed', 'closure', 'audioContext.close()'],
  ];

  /**
   * Runs step `index` of the table above; finishes the test after the
   * last step. The next step is chained from the resume callback so the
   * ordering matches the original hand-unrolled functions exactly:
   * set breakpoint -> wait for pause -> evaluate expression.
   * @param {number} index
   */
  function runStep(index) {
    if (index === steps.length) {
      SourcesTestRunner.completeDebuggerTest();
      return;
    }
    const [eventName, label, expression] = steps[index];
    SourcesTestRunner.setEventListenerBreakpoint('instrumentation:' + eventName, true);
    SourcesTestRunner.waitUntilPaused(() => {
      TestRunner.addResult(`Successfully paused after AudioContext ${label}.`);
      SourcesTestRunner.resumeExecution(() => runStep(index + 1));
    });
    TestRunner.evaluateInPageWithTimeout(expression);
  }

  SourcesTestRunner.startDebuggerTest(() => runStep(0));
})();
\ No newline at end of file
......@@ -71,6 +71,10 @@ static const char kWebglWarningFiredEventName[] = "webglWarningFired";
static const char kWebglErrorNameProperty[] = "webglErrorName";
static const char kScriptBlockedByCSPEventName[] = "scriptBlockedByCSP";
static const char kCanvasContextCreatedEventName[] = "canvasContextCreated";
// Event names for the WebAudio AudioContext instrumentation breakpoints.
// These strings must stay in sync with the identifiers the DevTools
// front-end registers ("instrumentation:audioContextCreated" etc. in
// SDK.DOMDebuggerManager).
static const char kAudioContextCreatedEventName[] = "audioContextCreated";
static const char kAudioContextClosedEventName[] = "audioContextClosed";
static const char kAudioContextResumedEventName[] = "audioContextResumed";
static const char kAudioContextSuspendedEventName[] = "audioContextSuspended";
namespace DOMDebuggerAgentState {
static const char kEventListenerBreakpoints[] = "eventListenerBreakpoints";
......@@ -823,4 +827,28 @@ void InspectorDOMDebuggerAgent::DidCommitLoadForLocalFrame(LocalFrame*) {
dom_breakpoints_.clear();
}
// Probe hook: an AudioContext was constructed. Pauses script execution if
// the "audioContextCreated" event listener breakpoint is enabled.
// NOTE(review): the trailing `true` presumably requests a synchronous
// pause — confirm against PauseOnNativeEventIfNeeded().
void InspectorDOMDebuggerAgent::DidCreateAudioContext() {
  PauseOnNativeEventIfNeeded(
      PreparePauseOnNativeEventData(kAudioContextCreatedEventName, nullptr),
      true);
}
// Probe hook: an AudioContext was closed. Pauses script execution if the
// "audioContextClosed" event listener breakpoint is enabled.
// NOTE(review): the trailing `true` presumably requests a synchronous
// pause — confirm against PauseOnNativeEventIfNeeded().
void InspectorDOMDebuggerAgent::DidCloseAudioContext() {
  PauseOnNativeEventIfNeeded(
      PreparePauseOnNativeEventData(kAudioContextClosedEventName, nullptr),
      true);
}
// Probe hook: an AudioContext was resumed. Pauses script execution if the
// "audioContextResumed" event listener breakpoint is enabled.
// NOTE(review): the trailing `true` presumably requests a synchronous
// pause — confirm against PauseOnNativeEventIfNeeded().
void InspectorDOMDebuggerAgent::DidResumeAudioContext() {
  PauseOnNativeEventIfNeeded(
      PreparePauseOnNativeEventData(kAudioContextResumedEventName, nullptr),
      true);
}
// Probe hook: an AudioContext was suspended. Pauses script execution if
// the "audioContextSuspended" event listener breakpoint is enabled.
// NOTE(review): the trailing `true` presumably requests a synchronous
// pause — confirm against PauseOnNativeEventIfNeeded().
void InspectorDOMDebuggerAgent::DidSuspendAudioContext() {
  PauseOnNativeEventIfNeeded(
      PreparePauseOnNativeEventData(kAudioContextSuspendedEventName, nullptr),
      true);
}
} // namespace blink
......@@ -110,6 +110,10 @@ class CORE_EXPORT InspectorDOMDebuggerAgent final
void Will(const probe::UserCallback&);
void Did(const probe::UserCallback&);
void BreakableLocation(const char* name);
// WebAudio instrumentation probes, one per AudioContext lifecycle
// operation (construction, close(), resume(), suspend()). Each fires the
// matching "audioContext*" event listener breakpoint.
void DidCreateAudioContext();
void DidCloseAudioContext();
void DidResumeAudioContext();
void DidSuspendAudioContext();
protocol::Response disable() override;
void Restore() override;
......
......@@ -81,6 +81,10 @@
"willModifyDOMAttr",
"willRemoveDOMNode",
"willSendXMLHttpOrFetchNetworkRequest",
"didCreateAudioContext",
"didCloseAudioContext",
"didResumeAudioContext",
"didSuspendAudioContext",
]
},
InspectorDOMSnapshotAgent: {
......
......@@ -163,4 +163,8 @@ interface CoreProbes {
void consoleTimeStamp(ExecutionContext*, const String& title);
void lifecycleEvent([Keep] LocalFrame*, DocumentLoader*, const char* name, double timestamp);
void paintTiming([Keep] Document*, const char* name, double timestamp);
// WebAudio AudioContext lifecycle probes: dispatched from
// modules/webaudio AudioContext::Create/closeContext/resumeContext/
// suspendContext and handled by InspectorDOMDebuggerAgent.
void didCreateAudioContext(Document*);
void didCloseAudioContext(Document*);
void didResumeAudioContext(Document*);
void didSuspendAudioContext(Document*);
}
......@@ -588,6 +588,9 @@ SDK.DOMDebuggerManager = class {
Common.UIString('Timer'),
['setTimeout', 'clearTimeout', 'setInterval', 'clearInterval', 'setTimeout.callback', 'setInterval.callback']);
this._createInstrumentationBreakpoints(Common.UIString('Window'), ['DOMWindow.close']);
this._createInstrumentationBreakpoints(
Common.UIString('WebAudio'),
['audioContextCreated', 'audioContextClosed', 'audioContextResumed', 'audioContextSuspended']);
this._createEventListenerBreakpoints(
Common.UIString('Media'),
......@@ -669,6 +672,14 @@ SDK.DOMDebuggerManager = class {
this._resolveEventListenerBreakpoint('instrumentation:Notification.requestPermission')._title = 'requestPermission';
this._resolveEventListenerBreakpoint('instrumentation:DOMWindow.close')._title = 'window.close';
this._resolveEventListenerBreakpoint('instrumentation:Document.write')._title = 'document.write';
this._resolveEventListenerBreakpoint('instrumentation:audioContextCreated')._title =
Common.UIString('Create AudioContext');
this._resolveEventListenerBreakpoint('instrumentation:audioContextClosed')._title =
Common.UIString('Close AudioContext');
this._resolveEventListenerBreakpoint('instrumentation:audioContextResumed')._title =
Common.UIString('Resume AudioContext');
this._resolveEventListenerBreakpoint('instrumentation:audioContextSuspended')._title =
Common.UIString('Suspend AudioContext');
SDK.targetManager.observeModels(SDK.DOMDebuggerModel, this);
}
......
......@@ -11,6 +11,7 @@
#include "third_party/blink/renderer/core/frame/local_dom_window.h"
#include "third_party/blink/renderer/core/frame/use_counter.h"
#include "third_party/blink/renderer/core/inspector/console_message.h"
#include "third_party/blink/renderer/core/probe/core_probes.h"
#include "third_party/blink/renderer/core/timing/dom_window_performance.h"
#include "third_party/blink/renderer/core/timing/window_performance.h"
#include "third_party/blink/renderer/modules/webaudio/audio_context_options.h"
......@@ -105,6 +106,8 @@ AudioContext* AudioContext::Create(Document& document,
"https://goo.gl/7K7WLu"));
}
probe::didCreateAudioContext(&document);
return audio_context;
}
......@@ -173,6 +176,9 @@ ScriptPromise AudioContext::suspendContext(ScriptState* script_state) {
// Since we don't have any way of knowing when the hardware actually stops,
// we'll just resolve the promise now.
resolver->Resolve();
// Probe reports the suspension only when the promise is resolved.
probe::didSuspendAudioContext(GetDocument());
}
return promise;
......@@ -204,6 +210,9 @@ ScriptPromise AudioContext::resumeContext(ScriptState* script_state) {
// Do not set the state to running here. We wait for the
// destination to start to set the state.
StartRendering();
// Probe reports only when the user gesture allows the audio rendering.
probe::didResumeAudioContext(GetDocument());
}
}
......@@ -268,6 +277,8 @@ ScriptPromise AudioContext::closeContext(ScriptState* script_state) {
// here.
Uninitialize();
probe::didCloseAudioContext(GetDocument());
return promise;
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment