Commit 08965ad8 authored by phoglund, committed by Commit bot

Adding local stream mute tests, tightening up WebRTC audio tests.

These tests were originally made to cover http://crbug.com/398157.
Alas, that bug turned out to be very difficult to test since it does
not affect energy levels. That's what we use as a lightweight way to
detect whether audio is playing, but the bug managed to make audio stop
playing without affecting the levels. Still, I think the tests are
useful.

BUG=402860
R=perkj@chromium.org

Review URL: https://codereview.chromium.org/468243003

Cr-Commit-Position: refs/heads/master@{#291692}
parent fca0f33a
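
Note on the "energy levels" mentioned above: the JavaScript test helpers in this CL sample the audio output energy that Chrome's legacy getStats() callback API reports for the receiving audio track. As a rough sketch of that kind of polling (the helper name here is hypothetical and only illustrates the legacy stats API; it is not code from this CL):

// Hypothetical sketch: read 'audioOutputLevel' (range [0, 32768]) from the
// legacy Chrome getStats() callback API on the receiving peer connection.
function sampleAudioOutputLevel(peerConnection, callback) {
  peerConnection.getStats(function(response) {
    var reports = response.result();
    for (var i = 0; i < reports.length; ++i) {
      if (reports[i].names().indexOf('audioOutputLevel') != -1) {
        callback(parseInt(reports[i].stat('audioOutputLevel')));
        return;
      }
    }
    callback(0);  // No audio receive stats reported yet.
  });
}

Repeated samples of this value are what the helpers in the diff below interpret as "audio is playing" versus "silence".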
@@ -357,9 +357,31 @@ IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
 }
 
 IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
-                       EstablishAudioVideoCallAndVerifyMutingWorks) {
+                       EstablishAudioVideoCallAndVerifyRemoteMutingWorks) {
   MakeAudioDetectingPeerConnectionCall(base::StringPrintf(
-      "callAndEnsureAudioTrackMutingWorks(%s);", kUseLenientAudioChecking));
+      "callAndEnsureRemoteAudioTrackMutingWorks(%s);",
+      kUseLenientAudioChecking));
+}
+
+IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
+                       EstablishAudioVideoCallAndVerifyLocalMutingWorks) {
+  MakeAudioDetectingPeerConnectionCall(base::StringPrintf(
+      "callAndEnsureLocalAudioTrackMutingWorks(%s);",
+      kUseLenientAudioChecking));
+}
+
+IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
+                       EnsureLocalVideoMuteDoesntMuteAudio) {
+  MakeAudioDetectingPeerConnectionCall(base::StringPrintf(
+      "callAndEnsureLocalVideoMutingDoesntMuteAudio(%s);",
+      kUseLenientAudioChecking));
+}
+
+IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
+                       EnsureRemoteVideoMuteDoesntMuteAudio) {
+  MakeAudioDetectingPeerConnectionCall(base::StringPrintf(
+      "callAndEnsureRemoteVideoMutingDoesntMuteAudio(%s);",
+      kUseLenientAudioChecking));
 }
 
 // Flaky on TSAN v2: http://crbug.com/373637
...
@@ -159,7 +159,7 @@
     waitForVideo('remote-view-2');
   });
 }
 
 // Test that we can setup call with an audio and video track and
 // simulate that the remote peer don't support MSID.
 function callWithoutMsidAndBundle() {
@@ -325,55 +325,69 @@
       offerOptions);
 }
 
+function callAndEnsureAudioIsPlaying(beLenient, constraints) {
+  createConnections(null);
+
+  // Add the local stream to gFirstConnection to play one-way audio.
+  navigator.webkitGetUserMedia(constraints,
+      addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
+
+  var onCallEstablished = function() {
+    ensureAudioPlaying(gSecondConnection, beLenient);
+  };
+
+  waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
+}
+
 function enableRemoteVideo(peerConnection, enabled) {
   remoteStream = peerConnection.getRemoteStreams()[0];
-  remoteVideoTrack = remoteStream.getVideoTracks()[0];
-  remoteVideoTrack.enabled = enabled;
+  remoteStream.getVideoTracks()[0].enabled = enabled;
 }
 
 function enableRemoteAudio(peerConnection, enabled) {
   remoteStream = peerConnection.getRemoteStreams()[0];
-  remoteAudioTrack = remoteStream.getAudioTracks()[0];
-  remoteAudioTrack.enabled = enabled;
+  remoteStream.getAudioTracks()[0].enabled = enabled;
 }
 
-function callAndEnsureAudioIsPlaying(beLenient, constraints) {
-  createConnections(null);
-  navigator.webkitGetUserMedia(constraints,
-      addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
-
-  // Wait until we have gathered samples and can conclude if audio is playing.
-  addExpectedEvent();
-  var onCallEstablished = function() {
-    // Gather 50 samples per second for 2 seconds.
-    gatherAudioLevelSamples(gSecondConnection, 100, 50, function(samples) {
-      verifyAudioIsPlaying(samples, beLenient);
-      eventOccured();
-    });
-  };
-
-  waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
+function enableLocalVideo(peerConnection, enabled) {
+  localStream = peerConnection.getLocalStreams()[0];
+  localStream.getVideoTracks()[0].enabled = enabled;
+}
+
+function enableLocalAudio(peerConnection, enabled) {
+  localStream = peerConnection.getLocalStreams()[0];
+  localStream.getAudioTracks()[0].enabled = enabled;
 }
 
-function callAndEnsureAudioTrackMutingWorks(beLenient) {
+function callAndEnsureRemoteAudioTrackMutingWorks(beLenient) {
   callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
   setAllEventsOccuredHandler(function() {
-    // Call is up, now mute the track and check everything goes silent (give
-    // it a small delay though, we don't expect it to happen instantly).
+    setAllEventsOccuredHandler(reportTestSuccess);
+
+    // Call is up, now mute the remote track and check we stop playing out
+    // audio (after a small delay, we don't expect it to happen instantly).
     enableRemoteAudio(gSecondConnection, false);
-
-    setTimeout(function() {
-      gatherAudioLevelSamples(gSecondConnection, 100, 50, function(samples) {
-        verifyIsSilent(samples);
-        reportTestSuccess();
-      });
-    }, 500);
+    ensureSilence(gSecondConnection);
   });
 }
 
+function callAndEnsureLocalAudioTrackMutingWorks(beLenient) {
+  callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
+  setAllEventsOccuredHandler(function() {
+    setAllEventsOccuredHandler(reportTestSuccess);
+
+    // Call is up, now mute the local track of the sending side and ensure
+    // the receiving side stops receiving audio.
+    enableLocalAudio(gFirstConnection, false);
+    ensureSilence(gSecondConnection);
+  });
+}
+
 function callAndEnsureAudioTrackUnmutingWorks(beLenient) {
   callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
   setAllEventsOccuredHandler(function() {
+    setAllEventsOccuredHandler(reportTestSuccess);
+
     // Mute, wait a while, unmute, verify audio gets back up.
     // (Also, ensure video muting doesn't affect audio).
    enableRemoteAudio(gSecondConnection, false);
@@ -384,16 +398,29 @@
     }, 500);
 
     setTimeout(function() {
-      // Sample for four seconds here; it can take a bit of time for audio to
-      // get back up after the unmute.
-      gatherAudioLevelSamples(gSecondConnection, 200, 50, function(samples) {
-        verifyAudioIsPlaying(samples, beLenient);
-        reportTestSuccess();
-      });
+      ensureAudioPlaying(gSecondConnection, beLenient);
     }, 1500);
   });
 }
 
+function callAndEnsureLocalVideoMutingDoesntMuteAudio(beLenient) {
+  callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
+  setAllEventsOccuredHandler(function() {
+    setAllEventsOccuredHandler(reportTestSuccess);
+    enableLocalVideo(gFirstConnection, false);
+    ensureAudioPlaying(gSecondConnection, beLenient);
+  });
+}
+
+function callAndEnsureRemoteVideoMutingDoesntMuteAudio(beLenient) {
+  callAndEnsureAudioIsPlaying(beLenient, {audio: true, video: true});
+  setAllEventsOccuredHandler(function() {
+    setAllEventsOccuredHandler(reportTestSuccess);
+    enableRemoteVideo(gSecondConnection, false);
+    ensureAudioPlaying(gSecondConnection, beLenient);
+  });
+}
+
 function callAndEnsureVideoTrackMutingWorks() {
   createConnections(null);
   navigator.webkitGetUserMedia({audio: true, video: true},
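
The new tests above rely on addExpectedEvent(), eventOccured() and setAllEventsOccuredHandler(), helpers defined elsewhere in the test page and not shown in this diff. A minimal sketch of the expected-event latch they presumably implement (an assumption about the harness, not code from this CL):

// Hypothetical sketch of the expected-event latch used by the tests.
var gExpectedEvents = 0;
var gAllEventsOccuredHandler = function() {};

function addExpectedEvent() {
  ++gExpectedEvents;
}

function eventOccured() {
  if (--gExpectedEvents == 0)
    gAllEventsOccuredHandler();
}

function setAllEventsOccuredHandler(handler) {
  gAllEventsOccuredHandler = handler;
}

This is why each new test re-registers the handler first thing: the outer handler fires once callAndEnsureAudioIsPlaying()'s check completes, and the nested setAllEventsOccuredHandler(reportTestSuccess) re-arms the latch so the test only passes after the follow-up ensureSilence() or ensureAudioPlaying() check has run.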
@@ -753,7 +780,7 @@
 }
 
 function receiveOffer(offerSdp, caller, callee) {
-  console.log("Receiving offer...\n" + offerSdp);
+  console.log("Receiving offer...");
   offerSdp = transformRemoteSdp(offerSdp);
 
   var parsedOffer = new RTCSessionDescription({ type: 'offer',
@@ -874,16 +901,16 @@
       <td>Remote Stream for Connection 4</td>
     </tr>
     <tr>
-      <td><video width="320" height="240" id="local-view"
-          autoplay="autoplay"></video></td>
-      <td><video width="320" height="240" id="remote-view-1"
-          autoplay="autoplay"></video></td>
-      <td><video width="320" height="240" id="remote-view-2"
-          autoplay="autoplay"></video></td>
-      <td><video width="320" height="240" id="remote-view-3"
-          autoplay="autoplay"></video></td>
-      <td><video width="320" height="240" id="remote-view-4"
-          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="local-view" autoplay muted>
+          </video></td>
+      <td><video width="320" height="240" id="remote-view-1" autoplay>
+          </video></td>
+      <td><video width="320" height="240" id="remote-view-2" autoplay>
+          </video></td>
+      <td><video width="320" height="240" id="remote-view-3" autoplay>
+          </video></td>
+      <td><video width="320" height="240" id="remote-view-4" autoplay>
+          </video></td>
       <!-- Canvases are named after their corresponding video elements. -->
       <td><canvas width="320" height="240" id="remote-view-1-canvas"
           style="display:none"></canvas></td>
...
@@ -7,6 +7,33 @@
 // GetStats reports audio output energy in the [0, 32768] range.
 var MAX_AUDIO_OUTPUT_ENERGY = 32768;
 
+// Queries WebRTC stats on |peerConnection| to find out whether audio is playing
+// on the connection. Note this does not necessarily mean the audio is actually
+// playing out (for instance if there's a bug in the WebRTC web media player).
+// If |beLenient| is true, we assume we're on a slow and unreliable bot and that
+// we should do a minimum of checking.
+function ensureAudioPlaying(peerConnection, beLenient) {
+  addExpectedEvent();
+
+  // Gather 50 samples per second for 2 seconds.
+  gatherAudioLevelSamples(peerConnection, 100, 50, function(samples) {
+    identifyFakeDeviceSignal_(samples, beLenient);
+    eventOccured();
+  });
+}
+
+// Queries WebRTC stats on |peerConnection| to find out whether audio is muted
+// on the connection.
+function ensureSilence(peerConnection) {
+  addExpectedEvent();
+  setTimeout(function() {
+    gatherAudioLevelSamples(peerConnection, 100, 50, function(samples) {
+      identifySilence_(samples);
+      eventOccured();
+    });
+  }, 500);
+}
+
 // Gathers |numSamples| samples at |frequency| number of times per second and
 // calls back |callback| with an array with numbers in the [0, 32768] range.
 function gatherAudioLevelSamples(peerConnection, numSamples, frequency,
@@ -34,15 +61,19 @@ function gatherAudioLevelSamples(peerConnection, numSamples, frequency,
   }, 1000 / frequency);
 }
 
-// Tries to identify the beep-every-half-second signal generated by the fake
-// audio device in media/video/capture/fake_video_capture_device.cc. Fails the
-// test if we can't see a signal. The samples should have been gathered over at
-// least two seconds since we expect to see at least three "peaks" in there
-// (we should see either 3 or 4 depending on how things line up).
-//
-// If |beLenient| is specified, we assume we're running on a slow device or
-// or under TSAN, and relax the checks quite a bit.
-function verifyAudioIsPlaying(samples, beLenient) {
+/**
+ * Tries to identify the beep-every-half-second signal generated by the fake
+ * audio device in media/video/capture/fake_video_capture_device.cc. Fails the
+ * test if we can't see a signal. The samples should have been gathered over at
+ * least two seconds since we expect to see at least three "peaks" in there
+ * (we should see either 3 or 4 depending on how things line up).
+ *
+ * If |beLenient| is specified, we assume we're running on a slow device or
+ * or under TSAN, and relax the checks quite a bit.
+ *
+ * @private
+ */
+function identifyFakeDeviceSignal_(samples, beLenient) {
   var numPeaks = 0;
   var threshold = MAX_AUDIO_OUTPUT_ENERGY * 0.7;
   if (beLenient)
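
The body of identifyFakeDeviceSignal_() continues beyond this hunk. For context, a threshold-crossing peak count consistent with the comment above could look like the following sketch (an illustration, not the actual implementation):

// Sketch only: count upward crossings of |threshold| in |samples| and
// compare against the number of beeps we expect to have captured.
function countPeaks(samples, threshold) {
  var numPeaks = 0;
  var inPeak = false;
  for (var i = 0; i < samples.length; ++i) {
    if (!inPeak && samples[i] > threshold) {
      inPeak = true;
      ++numPeaks;
    } else if (samples[i] <= threshold) {
      inPeak = false;
    }
  }
  return numPeaks;
}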
@@ -69,12 +100,15 @@ function verifyAudioIsPlaying(samples, beLenient) {
       samples + '"');
 }
 
-// If silent (like when muted), we should get very near zero audio level.
-function verifyIsSilent(samples) {
+/**
+ * @private
+ */
+function identifySilence_(samples) {
   var average = 0;
   for (var i = 0; i < samples.length; ++i)
     average += samples[i] / samples.length;
 
+  // If silent (like when muted), we should get very near zero audio level.
   console.log('Average audio level: ' + average);
   if (average > 500)
     failTest('Expected silence, but avg audio level was ' + average);
...