Commit d7c595ce authored by ehmaldonado, committed by Commit bot

Separates the WebRTC browser tests that deal with audio detection.

This is to make peerconnection-call.html and webrtc_browsertest.cc
more readable.

BUG=6135

Review-Url: https://codereview.chromium.org/2167133002
Cr-Commit-Position: refs/heads/master@{#407116}
parent 48d3ed9d
@@ -45,27 +45,6 @@ class MAYBE_WebRtcBrowserTest : public WebRtcContentBrowserTest {
void MakeTypicalPeerConnectionCall(const std::string& javascript) {
MakeTypicalCall(javascript, "/media/peerconnection-call.html");
}
// Convenience method for making calls that detect if audio is playing (which
// has some special prerequisites, such that there needs to be an audio output
// device on the executing machine).
void MakeAudioDetectingPeerConnectionCall(const std::string& javascript) {
if (!media::AudioManager::Get()->HasAudioOutputDevices()) {
// Bots with no output devices will force the audio code into a state
// where it doesn't manage to set either the low or high latency path.
// This test will compute useless values in that case, so skip running on
// such bots (see crbug.com/326338).
LOG(INFO) << "Missing output devices: skipping test...";
return;
}
ASSERT_TRUE(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream))
<< "Must run with fake devices since the test will explicitly look "
<< "for the fake device signal.";
MakeTypicalPeerConnectionCall(javascript);
}
};
// These tests will make a complete PeerConnection-based call and verify that
@@ -142,13 +121,6 @@ IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
MakeTypicalPeerConnectionCall(kJavascript);
}
// Causes asserts in libjingle: http://crbug.com/484826.
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
DISABLED_CanMakeVideoCallAndThenRenegotiateToAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndRenegotiateToAudio({audio: true, video:true}, {audio: true});");
}
// This test makes a call between pc1 and pc2 where a video only stream is sent
// from pc1 to pc2. The stream sent from pc1 to pc2 is cloned from the stream
// received on pc2 to test that cloning of remote video and audio tracks works
@@ -219,54 +191,6 @@ IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest, AddTwoMediaStreamsToOnePC) {
MakeTypicalPeerConnectionCall("addTwoMediaStreamsToOneConnection();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishAudioVideoCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioIsPlaying({audio:true, video:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishAudioOnlyCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioIsPlaying({audio:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishIsac16KCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callWithIsac16KAndEnsureAudioIsPlaying({audio:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishAudioVideoCallAndVerifyRemoteMutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureRemoteAudioTrackMutingWorks();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishAudioVideoCallAndVerifyLocalMutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureLocalAudioTrackMutingWorks();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EnsureLocalVideoMuteDoesntMuteAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureLocalVideoMutingDoesntMuteAudio();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EnsureRemoteVideoMuteDoesntMuteAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureRemoteVideoMutingDoesntMuteAudio();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest,
EstablishAudioVideoCallAndVerifyUnmutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioTrackUnmutingWorks();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserTest, CallAndVerifyVideoMutingWorks) {
MakeTypicalPeerConnectionCall("callAndEnsureVideoTrackMutingWorks();");
}
...
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/threading/platform_thread.h"
#include "build/build_config.h"
#include "content/browser/web_contents/web_contents_impl.h"
#include "content/public/common/content_switches.h"
#include "content/public/common/webrtc_ip_handling_policy.h"
#include "content/public/test/browser_test_utils.h"
#include "content/public/test/content_browser_test_utils.h"
#include "content/public/test/test_utils.h"
#include "content/test/webrtc_content_browsertest_base.h"
#include "media/audio/audio_manager.h"
#include "media/base/media_switches.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
namespace content {
#if defined(OS_ANDROID) && defined(ADDRESS_SANITIZER)
// Renderer crashes under Android ASAN: https://crbug.com/408496.
#define MAYBE_WebRtcBrowserAudioTest DISABLED_WebRtcBrowserAudioTest
#else
#define MAYBE_WebRtcBrowserAudioTest WebRtcBrowserAudioTest
#endif
// This class tests the scenario when permission to access mic or camera is
// granted.
class MAYBE_WebRtcBrowserAudioTest : public WebRtcContentBrowserTest {
public:
MAYBE_WebRtcBrowserAudioTest() {}
~MAYBE_WebRtcBrowserAudioTest() override {}
void SetUpCommandLine(base::CommandLine* command_line) override {
WebRtcContentBrowserTest::SetUpCommandLine(command_line);
// Automatically grant device permission.
AppendUseFakeUIForMediaStreamFlag();
}
protected:
// Convenience method for making calls that detect if audio is playing (which
// has some special prerequisites, such that there needs to be an audio output
// device on the executing machine).
void MakeAudioDetectingPeerConnectionCall(const std::string& javascript) {
if (!media::AudioManager::Get()->HasAudioOutputDevices()) {
// Bots with no output devices will force the audio code into a state
// where it doesn't manage to set either the low or high latency path.
// This test will compute useless values in that case, so skip running on
// such bots (see crbug.com/326338).
LOG(INFO) << "Missing output devices: skipping test...";
return;
}
ASSERT_TRUE(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream))
<< "Must run with fake devices since the test will explicitly look "
<< "for the fake device signal.";
MakeTypicalCall(javascript, "/media/peerconnection-call-audio.html");
}
};
// Causes asserts in libjingle: http://crbug.com/484826.
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
DISABLED_CanMakeVideoCallAndThenRenegotiateToAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndRenegotiateToAudio({audio: true, video:true}, {audio: true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishAudioVideoCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioIsPlaying({audio:true, video:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishAudioOnlyCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioIsPlaying({audio:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishIsac16KCallAndEnsureAudioIsPlaying) {
MakeAudioDetectingPeerConnectionCall(
"callWithIsac16KAndEnsureAudioIsPlaying({audio:true});");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishAudioVideoCallAndVerifyRemoteMutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureRemoteAudioTrackMutingWorks();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishAudioVideoCallAndVerifyLocalMutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureLocalAudioTrackMutingWorks();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EnsureLocalVideoMuteDoesntMuteAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureLocalVideoMutingDoesntMuteAudio();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EnsureRemoteVideoMuteDoesntMuteAudio) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureRemoteVideoMutingDoesntMuteAudio();");
}
IN_PROC_BROWSER_TEST_F(MAYBE_WebRtcBrowserAudioTest,
EstablishAudioVideoCallAndVerifyUnmutingWorks) {
MakeAudioDetectingPeerConnectionCall(
"callAndEnsureAudioTrackUnmutingWorks();");
}
} // namespace content
@@ -313,6 +313,7 @@
'content_browsertests_webrtc_sources': [
'browser/media/webrtc/webrtc_audio_debug_recordings_browsertest.cc',
'browser/media/webrtc/webrtc_browsertest.cc',
'browser/media/webrtc/webrtc_browsertest_audio.cc',
'browser/media/webrtc/webrtc_browsertest_data.cc',
'browser/media/webrtc/webrtc_capture_from_element_browsertest.cc',
'browser/media/webrtc/webrtc_datachannel_browsertest.cc',
...
<html>
<head>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript" src="webrtc_test_common.js"></script>
<script type="text/javascript" src="webrtc_test_audio.js"></script>
<script type="text/javascript">
$ = function(id) {
return document.getElementById(id);
};
window.onerror = function(errorMsg, url, lineNumber, column, errorObj) {
failTest('Error: ' + errorMsg + '\nScript: ' + url +
'\nLine: ' + lineNumber + '\nColumn: ' + column +
'\nStackTrace: ' + errorObj);
}
var gFirstConnection = null;
var gSecondConnection = null;
var gLocalStream = null;
var gSentTones = '';
var gRemoteStreams = {};
setAllEventsOccuredHandler(reportTestSuccess);
// The second set of constraints should request audio (e.g. audio:true) since
// we expect audio to be playing after the second renegotiation.
function callAndRenegotiateToAudio(constraints, renegotiationConstraints) {
createConnections(null);
navigator.webkitGetUserMedia(constraints,
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForConnectionToStabilize(gFirstConnection, function() {
gFirstConnection.removeStream(gLocalStream);
gSecondConnection.removeStream(gLocalStream);
navigator.webkitGetUserMedia(renegotiationConstraints,
addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
var onCallEstablished = function() {
ensureAudioPlaying(gSecondConnection);
};
waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
});
}
function callAndEnsureAudioIsPlaying(constraints) {
createConnections(null);
// Add the local stream to gFirstConnection to play one-way audio.
navigator.webkitGetUserMedia(constraints,
addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
var onCallEstablished = function() {
ensureAudioPlaying(gSecondConnection);
};
waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
}
function callWithIsac16KAndEnsureAudioIsPlaying(constraints) {
setOfferSdpTransform(function(sdp) {
sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
'm=audio $1 RTP/SAVPF 103 126\r\n');
sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:103 minptime=10');
if (sdp.search('a=rtpmap:103 ISAC/16000') == -1)
failTest('Missing iSAC 16K codec on Android; cannot force codec.');
return sdp;
});
callAndEnsureAudioIsPlaying(constraints);
}
function enableRemoteVideo(peerConnection, enabled) {
remoteStream = peerConnection.getRemoteStreams()[0];
remoteStream.getVideoTracks()[0].enabled = enabled;
}
function enableRemoteAudio(peerConnection, enabled) {
remoteStream = peerConnection.getRemoteStreams()[0];
remoteStream.getAudioTracks()[0].enabled = enabled;
}
function enableLocalVideo(peerConnection, enabled) {
localStream = peerConnection.getLocalStreams()[0];
localStream.getVideoTracks()[0].enabled = enabled;
}
function enableLocalAudio(peerConnection, enabled) {
localStream = peerConnection.getLocalStreams()[0];
localStream.getAudioTracks()[0].enabled = enabled;
}
function callAndEnsureRemoteAudioTrackMutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Call is up, now mute the remote track and check we stop playing out
// audio (after a small delay, we don't expect it to happen instantly).
enableRemoteAudio(gSecondConnection, false);
ensureSilence(gSecondConnection);
});
}
function callAndEnsureLocalAudioTrackMutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Call is up, now mute the local track of the sending side and ensure
// the receiving side stops receiving audio.
enableLocalAudio(gFirstConnection, false);
ensureSilence(gSecondConnection);
});
}
function callAndEnsureAudioTrackUnmutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Mute, wait a while, unmute, verify audio gets back up.
// (Also, ensure video muting doesn't affect audio).
enableRemoteAudio(gSecondConnection, false);
enableRemoteVideo(gSecondConnection, false);
setTimeout(function() {
enableRemoteAudio(gSecondConnection, true);
}, 500);
setTimeout(function() {
ensureAudioPlaying(gSecondConnection);
}, 1500);
});
}
function callAndEnsureLocalVideoMutingDoesntMuteAudio() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
enableLocalVideo(gFirstConnection, false);
ensureAudioPlaying(gSecondConnection);
});
}
function callAndEnsureRemoteVideoMutingDoesntMuteAudio() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
enableRemoteVideo(gSecondConnection, false);
ensureAudioPlaying(gSecondConnection);
});
}
function createConnections(constraints) {
gFirstConnection = createConnection(constraints, 'remote-view-1');
assertEquals('stable', gFirstConnection.signalingState);
gSecondConnection = createConnection(constraints, 'remote-view-2');
assertEquals('stable', gSecondConnection.signalingState);
}
function createConnection(constraints, remoteView) {
var pc = new webkitRTCPeerConnection(null, constraints);
pc.onaddstream = function(event) {
onRemoteStream(event, remoteView);
}
return pc;
}
function displayAndRemember(localStream) {
var localStreamUrl = URL.createObjectURL(localStream);
$('local-view').src = localStreamUrl;
gLocalStream = localStream;
}
// Called if getUserMedia succeeds and we want to send from both connections.
function addStreamToBothConnectionsAndNegotiate(localStream) {
displayAndRemember(localStream);
gFirstConnection.addStream(localStream);
gSecondConnection.addStream(localStream);
negotiate();
}
// Called if getUserMedia succeeds when we want to send from one connection.
function addStreamToTheFirstConnectionAndNegotiate(localStream) {
displayAndRemember(localStream);
gFirstConnection.addStream(localStream);
negotiate();
}
function negotiate() {
negotiateBetween(gFirstConnection, gSecondConnection);
}
function onRemoteStream(e, target) {
console.log("Receiving remote stream...");
gRemoteStreams[target] = e.stream;
var remoteStreamUrl = URL.createObjectURL(e.stream);
var remoteVideo = $(target);
remoteVideo.src = remoteStreamUrl;
}
</script>
</head>
<body>
<table border="0">
<tr>
<td><video width="320" height="240" id="local-view" style="display:none"
autoplay muted></video></td>
<td><video width="320" height="240" id="remote-view-1"
style="display:none" autoplay></video></td>
<td><video width="320" height="240" id="remote-view-2"
style="display:none" autoplay></video></td>
<!-- Canvases are named after their corresponding video elements. -->
<td><canvas width="320" height="240" id="remote-view-1-canvas"
style="display:none"></canvas></td>
<td><canvas width="320" height="240" id="remote-view-2-canvas"
style="display:none"></canvas></td>
</tr>
</table>
</body>
</html>
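
A note on callWithIsac16KAndEnsureAudioIsPlaying() in the page above: the offer-SDP transform it installs via setOfferSdpTransform() keeps only payload types 103 (ISAC/16000) and 126 on the audio m-line and re-points the a=fmtp line from 111 to 103, so the call can only come up with iSAC at 16 kHz; it fails the test outright if the rtpmap for 103 is absent. A rough, self-contained sketch of that rewrite; only the two replace() calls mirror the page, the offer fragment itself is invented for illustration:

// Illustration of the SDP rewrite in callWithIsac16KAndEnsureAudioIsPlaying().
// The offer fragment below is made up; only the two replace() calls mirror the
// transform installed via setOfferSdpTransform() above.
var offerSdp = 'm=audio 9 RTP/SAVPF 111 103 104 126\r\n' +
               'a=rtpmap:111 opus/48000/2\r\n' +
               'a=rtpmap:103 ISAC/16000\r\n' +
               'a=fmtp:111 minptime=10\r\n';
var transformed = offerSdp
    .replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
             'm=audio $1 RTP/SAVPF 103 126\r\n')  // Offer only 103 and 126.
    .replace('a=fmtp:111 minptime=10',
             'a=fmtp:103 minptime=10');           // Re-point the fmtp line.
console.log(transformed);
// m=audio 9 RTP/SAVPF 103 126
// a=rtpmap:111 opus/48000/2
// a=rtpmap:103 ISAC/16000
// a=fmtp:103 minptime=10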
@@ -3,7 +3,6 @@
<head>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript" src="webrtc_test_common.js"></script>
<script type="text/javascript" src="webrtc_test_audio.js"></script>
<script type="text/javascript">
$ = function(id) {
return document.getElementById(id);
@@ -17,7 +16,6 @@
var gFirstConnection = null;
var gSecondConnection = null;
var gTestWithoutMsid = false;
var gLocalStream = null;
var gRemoteStreams = {};
@@ -235,10 +233,6 @@
function onRemoteStream(e, target) {
console.log("Receiving remote stream...");
if (gTestWithoutMsid && e.stream.id != "default") {
failTest('a default remote stream was expected but instead ' +
e.stream.id + ' was received.');
}
gRemoteStreams[target] = e.stream;
var remoteStreamUrl = URL.createObjectURL(e.stream);
var remoteVideo = $(target);
...
@@ -2,7 +2,6 @@
<head>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript" src="webrtc_test_common.js"></script>
<script type="text/javascript" src="webrtc_test_audio.js"></script>
<script type="text/javascript">
$ = function(id) {
return document.getElementById(id);
@@ -22,7 +21,6 @@
var gRemoteStreams = {};
setAllEventsOccuredHandler(reportTestSuccess);
// Test that we can setup a call with an audio and video track (must request
@@ -99,28 +97,6 @@
});
}
// The second set of constraints should request audio (e.g. audio:true) since
// we expect audio to be playing after the second renegotiation.
function callAndRenegotiateToAudio(constraints, renegotiationConstraints) {
createConnections(null);
navigator.webkitGetUserMedia(constraints,
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForConnectionToStabilize(gFirstConnection, function() {
gFirstConnection.removeStream(gLocalStream);
gSecondConnection.removeStream(gLocalStream);
navigator.webkitGetUserMedia(renegotiationConstraints,
addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
var onCallEstablished = function() {
ensureAudioPlaying(gSecondConnection);
};
waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
});
}
// First makes a call between pc1 and pc2 where a stream is sent from pc1 to
// pc2. The stream sent from pc1 to pc2 is cloned from the stream received on
// pc2 to test that cloning of remote video tracks works as intended and is
@@ -270,33 +246,6 @@
offerOptions);
}
function callAndEnsureAudioIsPlaying(constraints) {
createConnections(null);
// Add the local stream to gFirstConnection to play one-way audio.
navigator.webkitGetUserMedia(constraints,
addStreamToTheFirstConnectionAndNegotiate, printGetUserMediaError);
var onCallEstablished = function() {
ensureAudioPlaying(gSecondConnection);
};
waitForConnectionToStabilize(gFirstConnection, onCallEstablished);
}
function callWithIsac16KAndEnsureAudioIsPlaying(constraints) {
setOfferSdpTransform(function(sdp) {
sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
'm=audio $1 RTP/SAVPF 103 126\r\n');
sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:103 minptime=10');
if (sdp.search('a=rtpmap:103 ISAC/16000') == -1)
failTest('Missing iSAC 16K codec on Android; cannot force codec.');
return sdp;
});
callAndEnsureAudioIsPlaying(constraints);
}
function enableRemoteVideo(peerConnection, enabled) {
remoteStream = peerConnection.getRemoteStreams()[0];
remoteStream.getVideoTracks()[0].enabled = enabled;
@@ -307,78 +256,6 @@
remoteStream.getAudioTracks()[0].enabled = enabled;
}
function enableLocalVideo(peerConnection, enabled) {
localStream = peerConnection.getLocalStreams()[0];
localStream.getVideoTracks()[0].enabled = enabled;
}
function enableLocalAudio(peerConnection, enabled) {
localStream = peerConnection.getLocalStreams()[0];
localStream.getAudioTracks()[0].enabled = enabled;
}
function callAndEnsureRemoteAudioTrackMutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Call is up, now mute the remote track and check we stop playing out
// audio (after a small delay, we don't expect it to happen instantly).
enableRemoteAudio(gSecondConnection, false);
ensureSilence(gSecondConnection);
});
}
function callAndEnsureLocalAudioTrackMutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Call is up, now mute the local track of the sending side and ensure
// the receiving side stops receiving audio.
enableLocalAudio(gFirstConnection, false);
ensureSilence(gSecondConnection);
});
}
function callAndEnsureAudioTrackUnmutingWorks() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
// Mute, wait a while, unmute, verify audio gets back up.
// (Also, ensure video muting doesn't affect audio).
enableRemoteAudio(gSecondConnection, false);
enableRemoteVideo(gSecondConnection, false);
setTimeout(function() {
enableRemoteAudio(gSecondConnection, true);
}, 500);
setTimeout(function() {
ensureAudioPlaying(gSecondConnection);
}, 1500);
});
}
function callAndEnsureLocalVideoMutingDoesntMuteAudio() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
enableLocalVideo(gFirstConnection, false);
ensureAudioPlaying(gSecondConnection);
});
}
function callAndEnsureRemoteVideoMutingDoesntMuteAudio() {
callAndEnsureAudioIsPlaying({audio: true, video: true});
setAllEventsOccuredHandler(function() {
setAllEventsOccuredHandler(reportTestSuccess);
enableRemoteVideo(gSecondConnection, false);
ensureAudioPlaying(gSecondConnection);
});
}
function callAndEnsureVideoTrackMutingWorks() {
createConnections(null);
navigator.webkitGetUserMedia({audio: true, video: true},
@@ -710,7 +587,6 @@
return sdp.replace(/a=group:BUNDLE .*\r\n/g, '');
}
function onRemoteStream(e, target) {
console.log("Receiving remote stream...");
if (gTestWithoutMsid && e.stream.id != "default") {
...
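
A closing note on the mute/unmute helpers this change moves into the audio page (enableLocalAudio(), enableRemoteVideo(), and the callAndEnsure*Muting* functions removed above): they all work by flipping MediaStreamTrack.enabled on the first track of a local or remote stream, then rely on ensureAudioPlaying()/ensureSilence() (presumably provided by the included webrtc_test_audio.js, which is not part of this diff) to verify the audible effect. A minimal standalone sketch of that standard flag outside the test harness; the promise-based getUserMedia call and the 500 ms delay are illustrative, not taken from the tests:

// Standalone sketch: MediaStreamTrack.enabled is the knob the mute helpers use.
// A disabled audio track keeps its place in the stream but renders silence; no
// renegotiation is needed, and re-enabling the track restores normal output.
navigator.mediaDevices.getUserMedia({audio: true})
    .then(function(stream) {
      var audioTrack = stream.getAudioTracks()[0];
      audioTrack.enabled = false;    // Mute: downstream now receives silence.
      setTimeout(function() {
        audioTrack.enabled = true;   // Unmute: audio resumes.
      }, 500);
    })
    .catch(function(error) {
      console.log('getUserMedia failed: ' + error.name);
    });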