Commit b2b54b08 authored by Raymond Toy and committed by Commit Bot

Move resume from BaseAudioContext to its subclasses

The WebAudio spec moved the resume() method from BaseAudioContext to
its subclasses, AudioContext and OfflineAudioContext.

Update the IDL and implementation to reflect this change.  This causes
the following tests to fail:

AudioContext/audiocontext-properties.html
BaseAudioContext/baseaudiocontext-properties.html
OfflineAudioContext/offlineaudiocontext-properties.html

We remove these tests because they duplicate the coverage provided by
external/wpt/webaudio/idlharness.https.window.html.

Bug: 889663, 888870
Test: already covered by existing tests
Change-Id: Ic7ed8a5f950e8d069ac7c916d716526f96bf3c18
Reviewed-on: https://chromium-review.googlesource.com/1247703
Reviewed-by: Hongchan Choi <hongchan@chromium.org>
Reviewed-by: Kent Tamura <tkent@chromium.org>
Commit-Queue: Raymond Toy <rtoy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#595213}
parent 75c86c30
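The diff below moves resume() in both the Blink IDL files and the C++ headers. As a rough illustration only (not part of this change, just a sketch of the spec behavior it implements), the new placement can be checked from page script:

  // Sketch only: after this change, resume() should be an own property of the
  // subclass prototypes rather than of BaseAudioContext.prototype.
  let hasOwn = (proto, name) => Object.getOwnPropertyNames(proto).includes(name);

  console.log(hasOwn(BaseAudioContext.prototype, 'resume'));     // expected: false
  console.log(hasOwn(AudioContext.prototype, 'resume'));         // expected: true
  console.log(hasOwn(OfflineAudioContext.prototype, 'resume'));  // expected: true

  // Calling resume() is unchanged; only the interface that declares it moved.
  let context = new AudioContext();
  context.resume().then(() => console.log(context.state));  // 'running' once the context starts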
@@ -41,7 +41,7 @@ PASS AudioContext interface: existence and properties of interface prototype obj
 PASS AudioContext interface: attribute baseLatency
 FAIL AudioContext interface: attribute outputLatency assert_true: The prototype object must have a property "outputLatency" expected true got false
 PASS AudioContext interface: operation getOutputTimestamp()
-FAIL AudioContext interface: operation resume() assert_own_property: interface prototype object missing non-static operation expected property "resume" missing
+PASS AudioContext interface: operation resume()
 PASS AudioContext interface: operation suspend()
 PASS AudioContext interface: operation close()
 FAIL AudioContext interface: operation createMediaElementSource(HTMLMediaElement) assert_own_property: interface prototype object missing non-static operation expected property "createMediaElementSource" missing
@@ -104,7 +104,7 @@ PASS OfflineAudioContext interface: existence and properties of interface protot
 PASS OfflineAudioContext interface: existence and properties of interface prototype object's "constructor" property
 PASS OfflineAudioContext interface: existence and properties of interface prototype object's @@unscopables property
 PASS OfflineAudioContext interface: operation startRendering()
-FAIL OfflineAudioContext interface: operation resume() assert_own_property: interface prototype object missing non-static operation expected property "resume" missing
+PASS OfflineAudioContext interface: operation resume()
 PASS OfflineAudioContext interface: operation suspend(double)
 PASS OfflineAudioContext interface: attribute length
 PASS OfflineAudioContext interface: attribute oncomplete
...
@@ -127,6 +127,7 @@ interface AudioContext : BaseAudioContext
     method close
     method constructor
     method getOutputTimestamp
+    method resume
     method suspend
 interface AudioDestinationNode : AudioNode
     attribute @@toStringTag
@@ -261,7 +262,6 @@ interface BaseAudioContext : EventTarget
     method createStereoPanner
     method createWaveShaper
     method decodeAudioData
-    method resume
     setter onstatechange
 interface BatteryManager : EventTarget
     attribute @@toStringTag
@@ -4223,6 +4223,7 @@ interface OfflineAudioContext : BaseAudioContext
     getter length
     getter oncomplete
     method constructor
+    method resume
     method startRendering
     method suspend
     setter oncomplete
...
<!DOCTYPE html>
<html>
  <head>
    <title>
      Testing AudioContext properties
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit-util.js"></script>
    <script src="../resources/audit.js"></script>
    <script src="../resources/context-properties.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      let audit = Audit.createTaskRunner();

      // Cross-checking two sets of properties: a programmatically generated set
      // and a pre-populated set.
      audit.define('crosschecking-properties', (task, should) => {
        verifyPrototypeOwnProperties(
            AudioContext.prototype, AudioContextOwnProperties, should);
        task.done();
      });

      audit.run();
    </script>
  </body>
</html>
<!DOCTYPE html>
<html>
  <head>
    <title>
      Testing BaseAudioContext properties
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit-util.js"></script>
    <script src="../resources/audit.js"></script>
    <script src="../resources/context-properties.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      let audit = Audit.createTaskRunner();

      // Cross-checking two sets of properties: a programmatically generated set
      // and a pre-populated set.
      audit.define('crosschecking-properties', (task, should) => {
        verifyPrototypeOwnProperties(
            BaseAudioContext.prototype, BaseAudioContextOwnProperties, should);
        task.done();
      });

      audit.run();
    </script>
  </body>
</html>
<!DOCTYPE html>
<html>
  <head>
    <title>
      Testing OfflineAudioContext properties
    </title>
    <script src="../../resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit-util.js"></script>
    <script src="../resources/audit.js"></script>
    <script src="../resources/context-properties.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      let audit = Audit.createTaskRunner();

      // Cross-checking two sets of properties: a programmatically generated set
      // and a pre-populated set.
      audit.define('crosschecking-properties', (task, should) => {
        verifyPrototypeOwnProperties(
            OfflineAudioContext.prototype, OfflineAudioContextOwnProperties,
            should);
        task.done();
      });

      audit.run();
    </script>
  </body>
</html>
// The list of the 'own' properties in various AudioContexts. These lists were
// populated by running:
//
//   Object.getOwnPropertyNames(FooAudioContext.prototype);
//
// https://webaudio.github.io/web-audio-api/#BaseAudioContext
let BaseAudioContextOwnProperties = [
  'audioWorklet',
  'constructor',
  'createAnalyser',
  'createBiquadFilter',
  'createBuffer',
  'createBufferSource',
  'createChannelMerger',
  'createChannelSplitter',
  'createConstantSource',
  'createConvolver',
  'createDelay',
  'createDynamicsCompressor',
  'createGain',
  'createIIRFilter',
  'createOscillator',
  'createPanner',
  'createPeriodicWave',
  'createScriptProcessor',
  'createStereoPanner',
  'createWaveShaper',
  'currentTime',
  'decodeAudioData',
  'destination',
  'listener',
  'onstatechange',
  'resume',
  'sampleRate',
  'state',

  // TODO(hongchan): these belong to AudioContext.
  'createMediaElementSource',
  'createMediaStreamDestination',
  'createMediaStreamSource',
];

let AudioContextOwnProperties = [
  'close', 'constructor', 'suspend', 'getOutputTimestamp', 'baseLatency',

  // TODO(hongchan): Not implemented yet.
  // 'outputLatency',
];

let OfflineAudioContextOwnProperties = [
  'constructor',
  'length',
  'oncomplete',
  'startRendering',
  'suspend',
];

/**
 * Verify properties in the prototype with the pre-populated list. This is a
 * 2-way comparison to detect the missing and the unexpected property at the
 * same time.
 * @param {Object} targetPrototype Target prototype.
 * @param {Array} populatedList Property dictionary.
 * @param {Function} should |Should| assertion function.
 * @return {Map} Verification result map.
 */
function verifyPrototypeOwnProperties(targetPrototype, populatedList, should) {
  let propertyMap = new Map();
  let generatedList = Object.getOwnPropertyNames(targetPrototype);

  for (let index in populatedList) {
    propertyMap.set(populatedList[index], {actual: false, expected: true});
  }

  for (let index in generatedList) {
    if (propertyMap.has(generatedList[index])) {
      propertyMap.get(generatedList[index]).actual = true;
    } else {
      propertyMap.set(generatedList[index], {actual: true, expected: false});
    }
  }

  for (let [property, result] of propertyMap) {
    let prefix = 'The property "' + property + '"';
    if (result.expected && result.actual) {
      // The test meets the expectation.
      should(true, prefix).message('was expected and found successfully', '');
    } else if (result.expected && !result.actual) {
      // The expected property is missing.
      should(false, prefix).message('', 'was expected but not found.');
    } else if (!result.expected && result.actual) {
      // Something unexpected was found.
      should(false, prefix).message('', 'was not expected but found.');
    }
  }
}
@@ -297,6 +297,7 @@ interface AudioContext : BaseAudioContext
     method close
     method constructor
     method getOutputTimestamp
+    method resume
     method suspend
 interface AudioDestinationNode : AudioNode
     attribute @@toStringTag
@@ -482,7 +483,6 @@ interface BaseAudioContext : EventTarget
     method createStereoPanner
     method createWaveShaper
     method decodeAudioData
-    method resume
     setter onstatechange
 interface BatteryManager : EventTarget
     attribute @@toStringTag
@@ -4848,6 +4848,7 @@ interface OfflineAudioContext : BaseAudioContext
     getter length
     getter oncomplete
     method constructor
+    method resume
     method startRendering
     method suspend
     setter oncomplete
...
@@ -42,7 +42,7 @@ class MODULES_EXPORT AudioContext : public BaseAudioContext {
   bool IsContextClosed() const final;
   ScriptPromise suspendContext(ScriptState*);
-  ScriptPromise resumeContext(ScriptState*) final;
+  ScriptPromise resumeContext(ScriptState*);
   bool HasRealtimeConstraint() final { return true; }
...
@@ -40,6 +40,7 @@ enum AudioContextLatencyCategory {
 ] interface AudioContext : BaseAudioContext {
     [MeasureAs=AudioContextSuspend, CallWith=ScriptState, ImplementedAs=suspendContext] Promise<void> suspend();
     [MeasureAs=AudioContextClose, CallWith=ScriptState, ImplementedAs=closeContext] Promise<void> close();
+    [MeasureAs=AudioContextResume, CallWith=ScriptState, ImplementedAs=resumeContext] Promise<void> resume();
     // Output timestamp
     [MeasureAs=AudioContextGetOutputTimestamp, CallWith=ScriptState] AudioTimestamp getOutputTimestamp();
...
@@ -220,9 +220,6 @@ class MODULES_EXPORT BaseAudioContext
                                    const PeriodicWaveConstraints&,
                                    ExceptionState&);
-  // Resume
-  virtual ScriptPromise resumeContext(ScriptState*) = 0;
   // IIRFilter
   IIRFilterNode* createIIRFilter(Vector<double> feedforward_coef,
                                  Vector<double> feedback_coef,
...
@@ -61,9 +61,6 @@ callback DecodeSuccessCallback = void (AudioBuffer decodedData);
     [RaisesException, MeasureAs=AudioContextCreateChannelSplitter] ChannelSplitterNode createChannelSplitter(optional unsigned long numberOfOutputs);
     [RaisesException, MeasureAs=AudioContextCreateChannelMerger] ChannelMergerNode createChannelMerger(optional unsigned long numberOfInputs);
-    // Pause/resume
-    [MeasureAs=AudioContextResume, CallWith=ScriptState, ImplementedAs=resumeContext] Promise<void> resume();
     // TODO(rtoy): These really belong to the AudioContext, but we need them
     // here so we can use an offline audio context to test these.
     [RaisesException, MeasureAs=AudioContextCreateMediaElementSource] MediaElementAudioSourceNode createMediaElementSource(HTMLMediaElement mediaElement);
...
@@ -59,7 +59,7 @@ class MODULES_EXPORT OfflineAudioContext final : public BaseAudioContext {
   ScriptPromise startOfflineRendering(ScriptState*);
   ScriptPromise suspendContext(ScriptState*, double);
-  ScriptPromise resumeContext(ScriptState*) final;
+  ScriptPromise resumeContext(ScriptState*);
   void RejectPendingResolvers() override;
...
@@ -36,4 +36,5 @@
     readonly attribute unsigned long length;
     [CallWith=ScriptState, ImplementedAs=startOfflineRendering, MeasureAs=OfflineAudioContextStartRendering] Promise<AudioBuffer> startRendering();
     [CallWith=ScriptState, ImplementedAs=suspendContext, MeasureAs=OfflineAudioContextSuspend] Promise<void> suspend(double suspendTime);
+    [MeasureAs=OfflineAudioContextResume, CallWith=ScriptState, ImplementedAs=resumeContext] Promise<void> resume();
 };