Commit 43ebe9ea authored by phoglund@chromium.org's avatar phoglund@chromium.org

Refactored how WebRTC content browser tests talk to their javascript.

We will now use the domAutomationController rather than looking at the
page title. This will confer the following advantages:

- Tests will fail right away when the js detects an error, rather than
  timing out.
- We will get much better stack traces when errors do occur.
- The communication path to the tests becomes more flexible with
  possibility for custom return values (for instance if we need to
  verify something in the C++ code or have a multi-stage test).

BUG=None
NOTRY=True

(doing notry since CQ gets stuck on telemetry test on mac_rel)

Review URL: https://codereview.chromium.org/190563002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255795 0039d316-1c4b-4281-b951-d872f2087c98
parent 35c55662
......@@ -67,12 +67,12 @@ IN_PROC_BROWSER_TEST_F(WebRtcAecDumpBrowserTest, MAYBE_CallWithAecDump) {
NavigateToURL(shell(), url);
#if defined (OS_ANDROID)
// Always force iSAC 16K on Android for now (Opus is broken).
ASSERT_TRUE(ExecuteJavascript("forceIsac16KInSdp();"));
// Always force iSAC 16K on Android for now (Opus is broken).
EXPECT_EQ("isac-forced",
ExecuteJavascriptAndReturnResult("forceIsac16KInSdp();"));
#endif
EXPECT_TRUE(ExecuteJavascript("call({video: true, audio: true});"));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
EXPECT_TRUE(base::PathExists(dump_file_));
int64 file_size = 0;
......
......@@ -34,17 +34,17 @@ class WebRtcBrowserTest : public WebRtcContentBrowserTest {
GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
NavigateToURL(shell(), url);
ExecuteTestAndWaitForOk(javascript);
DisableOpusIfOnAndroid();
ExecuteJavascriptAndWaitForOk(javascript);
}
void ExecuteTestAndWaitForOk(const std::string& javascript) {
void DisableOpusIfOnAndroid() {
#if defined (OS_ANDROID)
// Always force iSAC 16K on Android for now (Opus is broken).
ASSERT_TRUE(ExecuteJavascript("forceIsac16KInSdp();"));
EXPECT_EQ("isac-forced",
ExecuteJavascriptAndReturnResult("forceIsac16KInSdp();"));
#endif
ASSERT_TRUE(ExecuteJavascript(javascript));
ExpectTitle("OK");
}
};
......@@ -331,7 +331,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest, MAYBE_CallWithAecDump) {
GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
NavigateToURL(shell(), url);
ExecuteTestAndWaitForOk("call({video: true, audio: true});");
DisableOpusIfOnAndroid();
ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
EXPECT_TRUE(base::PathExists(dump_file));
int64 file_size = 0;
......@@ -367,7 +368,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
NavigateToURL(shell(), url);
ExecuteTestAndWaitForOk("call({video: true, audio: true});");
DisableOpusIfOnAndroid();
ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
EXPECT_TRUE(base::PathExists(dump_file));
int64 file_size = 0;
......
......@@ -28,14 +28,14 @@ using trace_analyzer::TraceEventVector;
namespace {
static const char kGetUserMediaAndStop[] = "getUserMediaAndStop";
static const char kGetUserMediaAndWaitAndStop[] = "getUserMediaAndWaitAndStop";
static const char kGetUserMediaAndGetStreamUp[] = "getUserMediaAndGetStreamUp";
static const char kGetUserMediaAndAnalyseAndStop[] =
"getUserMediaAndAnalyseAndStop";
static const char kGetUserMediaAndExpectFailure[] =
"getUserMediaAndExpectFailure";
// Results returned by JS.
static const char kOK[] = "OK";
static const char kGetUserMediaFailed[] =
"GetUserMedia call failed with code undefined";
std::string GenerateGetUserMediaWithMandatorySourceID(
const std::string& function_name,
......@@ -109,20 +109,23 @@ class WebRtcGetUserMediaBrowserTest: public WebRtcContentBrowserTest {
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
NavigateToURL(shell(), url);
// Put getUserMedia to work and let it run for a couple of seconds.
DCHECK(time_to_sample_secs);
ASSERT_TRUE(
ExecuteJavascript(base::StringPrintf("%s({video: true}, %d);",
kGetUserMediaAndWaitAndStop,
time_to_sample_secs)));
ASSERT_EQ("ok-stream-running",
ExecuteJavascriptAndReturnResult(
base::StringPrintf("%s({video: true});",
kGetUserMediaAndGetStreamUp)));
// Make sure the stream is up and running, then start collecting traces.
ExpectTitle("Running...");
// Now the stream is up and running, start collecting traces.
StartTracing();
// Let the stream run for a while in javascript.
ExecuteJavascriptAndWaitForOk(
base::StringPrintf("waitAndStopVideoTrack(%d);", time_to_sample_secs));
// Wait until the page title changes to "OK". Do not sleep() here since that
// would stop both this code and the browser underneath.
ExpectTitle("OK");
StopTracing();
scoped_ptr<TraceAnalyzer> analyzer(CreateTraceAnalyzer());
......@@ -214,10 +217,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest, GetVideoStreamAndStop) {
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(
base::StringPrintf("%s({video: true});", kGetUserMediaAndStop)));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk(
base::StringPrintf("%s({video: true});", kGetUserMediaAndStop));
}
IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
......@@ -227,10 +228,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(base::StringPrintf(
"%s({video: true, audio: true});", kGetUserMediaAndStop)));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk(base::StringPrintf(
"%s({video: true, audio: true});", kGetUserMediaAndStop));
}
IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
......@@ -240,9 +239,7 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript("getUserMediaAndClone();"));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk("getUserMediaAndClone();");
}
IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
......@@ -282,25 +279,25 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
// Test with invalid mandatory audio sourceID.
NavigateToURL(shell(), url);
EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
ExecuteJavascriptAndWaitForOk(
GenerateGetUserMediaWithMandatorySourceID(
kGetUserMediaAndStop,
kGetUserMediaAndExpectFailure,
"something invalid",
video_ids[0])));
video_ids[0]));
// Test with invalid mandatory video sourceID.
EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
ExecuteJavascriptAndWaitForOk(
GenerateGetUserMediaWithMandatorySourceID(
kGetUserMediaAndStop,
kGetUserMediaAndExpectFailure,
audio_ids[0],
"something invalid")));
"something invalid"));
// Test with empty mandatory audio sourceID.
EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
ExecuteJavascriptAndWaitForOk(
GenerateGetUserMediaWithMandatorySourceID(
kGetUserMediaAndStop,
kGetUserMediaAndExpectFailure,
"",
video_ids[0])));
video_ids[0]));
}
IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
......@@ -342,10 +339,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest, TwoGetUserMediaAndStop) {
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(
"twoGetUserMediaAndStop({video: true, audio: true});"));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk(
"twoGetUserMediaAndStop({video: true, audio: true});");
}
// This test will make a simple getUserMedia page, verify that video is playing
......@@ -388,8 +383,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
// TODO(mcasas): add more aspect ratios, in particular 16:10 crbug.com/275594.
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(constraints_4_3));
ExpectTitle("4:3 letterbox");
ASSERT_EQ("4:3 letterbox",
ExecuteJavascriptAndReturnResult(constraints_4_3));
}
// This test calls getUserMedia and checks for aspect ratio behavior.
......@@ -406,8 +401,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
kGetUserMediaAndAnalyseAndStop, 640, 640, 360, 360, 30, 30);
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(constraints_16_9));
ExpectTitle("16:9 letterbox");
ASSERT_EQ("16:9 letterbox",
ExecuteJavascriptAndReturnResult(constraints_16_9));
}
namespace {
......@@ -449,8 +444,7 @@ IN_PROC_BROWSER_TEST_P(WebRtcConstraintsBrowserTest, GetUserMediaConstraints) {
user_media().max_frame_rate);
DVLOG(1) << "Calling getUserMedia: " << call;
NavigateToURL(shell(), url);
ASSERT_TRUE(ExecuteJavascript(call));
ExpectTitle("OK");
ExecuteJavascriptAndWaitForOk(call);
}
static const UserMediaSizes kAllUserMediaSizes[] = {
......
......@@ -10,15 +10,9 @@
setAllEventsOccuredHandler(function() {
gLocalStream.stop();
document.title = 'OK';
sendValueToTest(document.title);
reportTestSuccess();
});
function sendValueToTest(value) {
window.domAutomationController.setAutomationId(0);
window.domAutomationController.send(value);
}
function getSources() {
MediaStreamTrack.getSources(function(devices) {
document.title = 'Sources Available';
......@@ -29,30 +23,42 @@
// Creates a MediaStream and renders it locally. When the video is detected to
// be rolling, the title is changed and the stream should be stopped.
function getUserMediaAndStop(constraints) {
document.title = 'Calling GetUserMedia';
console.log('Calling getUserMediaAndStop.');
navigator.webkitGetUserMedia(
constraints,
function(stream) { displayAndDetectVideo(stream, stopVideoTrack); },
failedCallback);
}
// Requests getusermedia and expects it to fail.
// Requests getUserMedia with |constraints| and reports success only when the
// request is rejected; an unexpectedly granted stream fails the test.
function getUserMediaAndExpectFailure(constraints) {
  console.log('Calling getUserMediaAndExpectFailure.');
  var onUnexpectedStream = function(stream) {
    failTest('Unexpectedly succeeded getUserMedia.');
  };
  var onExpectedError = function(error) {
    reportTestSuccess();
  };
  navigator.webkitGetUserMedia(constraints, onUnexpectedStream,
                               onExpectedError);
}
// Creates a MediaStream and renders it locally. When the video is detected to
// be rolling, the title should be changed and the stream is let roll for a
// number |waitTimeInSeconds| and then it should be stopped.
function getUserMediaAndWaitAndStop(constraints, waitTimeInSeconds) {
// be rolling we return ok-stream-running through the automation controller.
// Creates a MediaStream and renders it locally. When the video is detected
// to be rolling, 'ok-stream-running' is returned to the test through the
// automation controller.
//
// NOTE: the previous |waitTimeInSeconds| parameter was unused by this body
// (the wait-and-stop step is now driven separately via waitAndStopVideoTrack)
// and the C++ caller passes only |constraints|, so it has been removed.
// Callers that still pass a second argument are unaffected.
function getUserMediaAndGetStreamUp(constraints) {
  console.log('Calling getUserMediaAndGetStreamUp.');
  navigator.webkitGetUserMedia(
      constraints,
      function(stream) {
        displayAndDetectVideo(
          stream,
          function() {
            sendValueToTest('ok-stream-running');
          });
      },
      failedCallback);
}
// Gets a video stream up, analyses it and returns the aspect ratio to the
// test through the automation controller.
// Gets a video stream up, analyses it, and hands the detected aspect ratio
// back to the test through the automation controller.
function getUserMediaAndAnalyseAndStop(constraints) {
  console.log('Calling getUserMediaAndAnalyseAndStop.');
  var onStream = displayDetectAndAnalyzeVideo;
  var onError = failedCallback;
  navigator.webkitGetUserMedia(constraints, onStream, onError);
}
......@@ -60,6 +66,7 @@
// This test that a MediaStream can be cloned and that the clone can
// be rendered.
// Requests an audio+video stream; on success the stream is cloned and the
// clone rendered (see createAndRenderClone).
function getUserMediaAndClone() {
  console.log('Calling getUserMediaAndClone.');
  var constraints = {video: true, audio: true};
  navigator.webkitGetUserMedia(constraints, createAndRenderClone,
                               failedCallback);
}
......@@ -69,36 +76,35 @@
// streams have the same source, both video streams should stop. If they do,
// the test succeeds.
function twoGetUserMediaAndStop(constraints) {
document.title = 'Calling Two GetUserMedia';
console.log('Calling Two GetUserMedia');
navigator.webkitGetUserMedia(
constraints,
function(stream) {
displayAndDetectVideo(stream, requestSecondGetUserMedia);
function(stream) {
displayAndDetectVideo(stream, requestSecondGetUserMedia);
},
failedCallback);
var requestSecondGetUserMedia = function() {
navigator.webkitGetUserMedia(
constraints,
function(stream) {
displayIntoVideoElement(stream,
function(stream) {
displayIntoVideoElement(stream,
stopStreamAndVerifyAllLocalViewsDontPlayVideo, 'local-view-2');
},
failedCallback);
};
var stopStreamAndVerifyAllLocalViewsDontPlayVideo = function() {
gLocalStream.getVideoTracks()[0].stop();
// Since local-view and local-view-2 are playing the video from the same
// source, both of them should stop.
waitForVideoToStop('local-view');
waitForVideoToStop('local-view-2');
};
};
}
function failedCallback(error) {
document.title = 'GetUserMedia call failed with code ' + error.code;
sendValueToTest(document.title);
failTest('GetUserMedia call failed with code ' + error.code);
}
function plugStreamIntoVideoElement(stream, videoElement) {
......@@ -109,7 +115,6 @@
function displayIntoVideoElement(stream, callback, videoElement) {
plugStreamIntoVideoElement(stream, videoElement);
document.title = 'Waiting for video...';
detectVideoPlaying(videoElement, callback);
}
......@@ -128,12 +133,12 @@
// work with audio devices and not all bots has a microphone.
new_stream = new webkitMediaStream();
new_stream.addTrack(stream.getVideoTracks()[0]);
expectEquals(new_stream.getVideoTracks().length, 1);
assertEquals(new_stream.getVideoTracks().length, 1);
if (stream.getAudioTracks().length > 0) {
new_stream.addTrack(stream.getAudioTracks()[0]);
expectEquals(new_stream.getAudioTracks().length, 1);
assertEquals(new_stream.getAudioTracks().length, 1);
new_stream.removeTrack(new_stream.getAudioTracks()[0]);
expectEquals(new_stream.getAudioTracks().length, 0);
assertEquals(new_stream.getAudioTracks().length, 0);
}
var newStreamUrl = URL.createObjectURL(new_stream);
......@@ -147,19 +152,102 @@
}
function waitAndStopVideoTrack(waitTimeInSeconds) {
document.title = 'Running...';
setTimeout(stopVideoTrack, waitTimeInSeconds * 1000);
}
function analyzeVideo() {
document.title = 'Waiting for video...';
addExpectedEvent();
detectAspectRatio(function(aspectRatio) {
document.title = aspectRatio;
eventOccured();
sendValueToTest(aspectRatio);
});
}
// This function tries to calculate the aspect ratio shown by the fake capture
// device in the video tag. For this, we count the amount of light green
// pixels along |aperture| pixels on the positive X and Y axis starting from
// the center of the image. In this very center there should be a time-varying
// pacman; the algorithm counts for a couple of iterations and keeps the
// maximum amount of light green pixels on both directions. From this data
// the aspect ratio is calculated relative to a 320x240 window, so 4:3 would
// show as a 1. Furthermore, since an original non-4:3 might be letterboxed or
// cropped, the actual X and Y pixel amounts are compared with the fake video
// capture expected pacman radius (see further below).
function detectAspectRatio(callback) {
  var width = VIDEO_TAG_WIDTH;
  var height = VIDEO_TAG_HEIGHT;
  var videoElement = $('local-view');
  var canvas = $('local-view-canvas');

  var maxLightGreenPixelsX = 0;
  var maxLightGreenPixelsY = 0;

  var aperture = Math.min(width, height) / 2;
  var iterations = 0;
  var maxIterations = 10;

  // BUG FIX: clearInterval() must be handed the id returned by setInterval(),
  // not the callback function itself — passing the function is a silent no-op
  // and the sampling loop would never stop. Keep the id here.
  var detectorIntervalId = null;

  var detectorFunction = function() {
    var context = canvas.getContext('2d');
    context.drawImage(videoElement, 0, 0, width, height);

    // We are interested in a window starting from the center of the image
    // where we expect the circle from the fake video capture to be rolling.
    var pixels = context.getImageData(width / 2, height / 2,
                                      aperture, aperture);

    var lightGreenPixelsX = 0;
    var lightGreenPixelsY = 0;

    // Walk horizontally counting light green pixels.
    for (var x = 0; x < aperture; ++x) {
      if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
        lightGreenPixelsX++;
    }
    // Walk vertically counting light green pixels.
    // NOTE(review): 135 looks like it should be COLOR_BACKGROUND_GREEN as in
    // the horizontal walk above — confirm before unifying.
    for (var y = 0; y < aperture; ++y) {
      if (pixels.data[4 * y * aperture + 1] != 135)
        lightGreenPixelsY++;
    }
    if (lightGreenPixelsX > maxLightGreenPixelsX &&
        lightGreenPixelsX < aperture)
      maxLightGreenPixelsX = lightGreenPixelsX;
    if (lightGreenPixelsY > maxLightGreenPixelsY &&
        lightGreenPixelsY < aperture)
      maxLightGreenPixelsY = lightGreenPixelsY;

    var detectedAspectRatioString = "";
    if (++iterations > maxIterations) {
      clearInterval(detectorIntervalId);

      // Declared with var to avoid leaking an implicit global.
      var observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX;

      // At this point the observed aspect ratio is either 1, for undistorted
      // 4:3, or some other aspect ratio that is seen as distorted.
      if (Math.abs(observedAspectRatio - 1.333) < 0.1)
        detectedAspectRatioString = "16:9";
      else if (Math.abs(observedAspectRatio - 1.20) < 0.1)
        detectedAspectRatioString = "16:10";
      else if (Math.abs(observedAspectRatio - 1.0) < 0.1)
        detectedAspectRatioString = "4:3";
      else
        detectedAspectRatioString = "UNKNOWN aspect ratio";

      console.log(detectedAspectRatioString + " observed aspect ratio (" +
                  observedAspectRatio + ")");

      // The FakeVideoCapture calculates the circle radius as
      // std::min(capture_format_.width, capture_format_.height) / 4;
      // we do the same and see if both dimensions are scaled, meaning
      // we started from a cropped or stretched image.
      var nonDistortedRadius = Math.min(width, height) / 4;
      if ((maxLightGreenPixelsX != nonDistortedRadius) &&
          (maxLightGreenPixelsY != nonDistortedRadius)) {
        detectedAspectRatioString += " cropped";
      } else
        detectedAspectRatioString += " letterbox";

      console.log("Original image is: " + detectedAspectRatioString);
      callback(detectedAspectRatioString);
    }
  };
  detectorIntervalId = setInterval(detectorFunction, 50);
}
</script>
</head>
<body>
......
......@@ -37,6 +37,7 @@
'');
return sdp;
};
sendValueToTest('isac-forced');
}
// When using external SDES, the crypto key is chosen by javascript.
......@@ -53,10 +54,7 @@
var EXTERNAL_GICE_UFRAG = '1234567890123456';
var EXTERNAL_GICE_PWD = '123456789012345678901234';
setAllEventsOccuredHandler(function() {
// The C++ tests look for this 'OK' in the title.
document.title = 'OK';
});
setAllEventsOccuredHandler(reportTestSuccess);
// Test that we can setup call with an audio and video track.
function call(constraints) {
......@@ -144,14 +142,13 @@
function negotiateUnsupportedVideoCodec() {
createConnections(null);
transformSdp = removeVideoCodec;
onLocalDescriptionError = function(error) {
var expectedMsg = 'Failed to set local offer sdp:' +
' Session error code: ERROR_CONTENT. Session error description:' +
' Failed to set video receive codecs..';
expectEquals(expectedMsg, error);
// Got the right message, test succeeded.
document.title = 'OK';
assertEquals(expectedMsg, error);
reportTestSuccess();
};
navigator.webkitGetUserMedia({audio: true, video: true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
......@@ -164,10 +161,9 @@
onLocalDescriptionError = function(error) {
var expectedMsg = 'Failed to set local offer sdp:' +
' Called with SDP without DTLS fingerprint.';
expectEquals(expectedMsg, error);
// Got the right message, test succeeded.
document.title = 'OK';
assertEquals(expectedMsg, error);
reportTestSuccess();
};
navigator.webkitGetUserMedia({audio: true, video: true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
......@@ -179,7 +175,7 @@
createConnections(null);
transformSdp = addBandwithControl;
navigator.webkitGetUserMedia({audio: true, video: true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
waitForVideo('remote-view-2');
}
......@@ -244,9 +240,7 @@
// Set an event handler for when the data channel has been closed.
setAllEventsOccuredHandler(function() {
// When the video is flowing the test is done.
setAllEventsOccuredHandler(function() {
document.title = 'OK';
});
setAllEventsOccuredHandler(reportTestSuccess);
navigator.webkitGetUserMedia({audio: true, video: true},
addStreamToBothConnectionsAndNegotiate, printGetUserMediaError);
waitForVideo('remote-view-1');
......@@ -266,7 +260,6 @@
dtmfSender.ontonechange = onToneChange;
dtmfSender.insertDTMF(tones);
// Wait for the DTMF tones callback.
document.title = 'Waiting for dtmf...';
addExpectedEvent();
var waitDtmf = setInterval(function() {
if (gSentTones == tones) {
......@@ -323,7 +316,7 @@
setTimeout(function() {
gatherAudioLevelSamples(gSecondConnection, 200, 100, function(samples) {
verifyIsSilent(samples);
document.title = 'OK';
reportTestSuccess();
});
}, 500);
});
......@@ -384,7 +377,7 @@
// received.
addExpectedEvent();
remote_stream_1.onaddtrack = function(){
expectEquals(remote_stream_1.getAudioTracks()[0].id,
assertEquals(remote_stream_1.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
......@@ -402,7 +395,7 @@
remote_stream_2 = gSecondConnection.getRemoteStreams()[0];
addExpectedEvent();
remote_stream_2.onaddtrack = function() {
expectEquals(remote_stream_2.getAudioTracks()[0].id,
assertEquals(remote_stream_2.getAudioTracks()[0].id,
local_stream.getAudioTracks()[0].id);
eventOccured();
}
......@@ -414,7 +407,7 @@
eventOccured();
}
// When all the above events have occurred- the test pass.
setAllEventsOccuredHandler(function() { document.title = 'OK'; });
setAllEventsOccuredHandler(reportTestSuccess);
local_stream.addTrack(gLocalStream.getAudioTracks()[0]);
local_stream.removeTrack(local_stream.getVideoTracks()[0]);
......@@ -433,26 +426,28 @@
var sendDataString = "send some text on a data channel."
firstDataChannel = gFirstConnection.createDataChannel(
"sendDataChannel", params);
expectEquals('connecting', firstDataChannel.readyState);
assertEquals('connecting', firstDataChannel.readyState);
// When |firstDataChannel| transition to open state, send a text string.
firstDataChannel.onopen = function() {
expectEquals('open', firstDataChannel.readyState);
assertEquals('open', firstDataChannel.readyState);
firstDataChannel.send(sendDataString);
}
// When |firstDataChannel| receive a message, close the channel and
// initiate a new offer/answer exchange to complete the closure.
firstDataChannel.onmessage = function(event) {
expectEquals(event.data, sendDataString);
assertEquals(event.data, sendDataString);
firstDataChannel.close();
negotiate();
}
// When |firstDataChannel| transition to closed state, the test pass.
// TODO(phoglund): This is flaky, at least in the setupLegacyCall case.
// See http://crbug.com/350388.
addExpectedEvent();
firstDataChannel.onclose = function() {
expectEquals('closed', firstDataChannel.readyState);
assertEquals('closed', firstDataChannel.readyState);
eventOccured();
}
......@@ -462,8 +457,8 @@
// When |secondDataChannel| receive a message, send a message back.
secondDataChannel.onmessage = function(event) {
expectEquals(event.data, sendDataString);
expectEquals('open', secondDataChannel.readyState);
assertEquals(event.data, sendDataString);
assertEquals('open', secondDataChannel.readyState);
secondDataChannel.send(sendDataString);
}
}
......@@ -478,18 +473,18 @@
var sendDataString = "send some text on a data channel."
firstDataChannel = gFirstConnection.createDataChannel(
"sendDataChannel", params);
expectEquals('connecting', firstDataChannel.readyState);
assertEquals('connecting', firstDataChannel.readyState);
// When |firstDataChannel| transition to open state, send a text string.
firstDataChannel.onopen = function() {
expectEquals('open', firstDataChannel.readyState);
assertEquals('open', firstDataChannel.readyState);
}
// When |firstDataChannel| receive a message, send message back.
// initiate a new offer/answer exchange to complete the closure.
firstDataChannel.onmessage = function(event) {
expectEquals('open', firstDataChannel.readyState);
expectEquals(event.data, sendDataString);
assertEquals('open', firstDataChannel.readyState);
assertEquals(event.data, sendDataString);
firstDataChannel.send(sendDataString);
}
......@@ -504,8 +499,8 @@
// When |secondDataChannel| receive a message, close the channel and
// initiate a new offer/answer exchange to complete the closure.
secondDataChannel.onmessage = function(event) {
expectEquals(event.data, sendDataString);
expectEquals('open', secondDataChannel.readyState);
assertEquals(event.data, sendDataString);
assertEquals('open', secondDataChannel.readyState);
secondDataChannel.close();
negotiate();
}
......@@ -513,7 +508,7 @@
// When |secondDataChannel| transition to closed state, the test pass.
addExpectedEvent();
secondDataChannel.onclose = function() {
expectEquals('closed', secondDataChannel.readyState);
assertEquals('closed', secondDataChannel.readyState);
eventOccured();
}
}
......@@ -529,15 +524,14 @@
function onToneChange(tone) {
gSentTones += tone.tone;
document.title = gSentTones;
}
function createConnections(constraints) {
gFirstConnection = createConnection(constraints, 'remote-view-1');
expectEquals('stable', gFirstConnection.signalingState);
assertEquals('stable', gFirstConnection.signalingState);
gSecondConnection = createConnection(constraints, 'remote-view-2');
expectEquals('stable', gSecondConnection.signalingState);
assertEquals('stable', gSecondConnection.signalingState);
}
function createConnection(constraints, remoteView) {
......@@ -557,12 +551,12 @@
// Called if getUserMedia fails.
function printGetUserMediaError(error) {
document.title = 'getUserMedia request failed:';
var message = 'getUserMedia request unexpectedly failed:';
if (error.constraintName)
document.title += ' could not satisfy constraint ' + error.constraintName;
message += ' could not satisfy constraint ' + error.constraintName;
else
document.title += ' devices not working/user denied access.';
console.log(document.title);
message += ' devices not working/user denied access.';
failTest(message);
}
// Called if getUserMedia succeeds and we want to send from both connections.
......@@ -581,8 +575,8 @@
}
function verifyHasOneAudioAndVideoTrack(stream) {
expectEquals(1, stream.getAudioTracks().length);
expectEquals(1, stream.getVideoTracks().length);
assertEquals(1, stream.getAudioTracks().length);
assertEquals(1, stream.getVideoTracks().length);
}
// Called if getUserMedia succeeds, then clone the stream, send two streams
......@@ -601,7 +595,7 @@
gFirstConnection.addStream(clonedStream);
// Verify the local streams are correct.
expectEquals(2, gFirstConnection.getLocalStreams().length);
assertEquals(2, gFirstConnection.getLocalStreams().length);
verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[0]);
verifyHasOneAudioAndVideoTrack(gFirstConnection.getLocalStreams()[1]);
......@@ -614,11 +608,11 @@
}
setAllEventsOccuredHandler(function() {
// Negotiation complete, verify remote streams on the receiving side.
expectEquals(2, gSecondConnection.getRemoteStreams().length);
assertEquals(2, gSecondConnection.getRemoteStreams().length);
verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[0]);
verifyHasOneAudioAndVideoTrack(gSecondConnection.getRemoteStreams()[1]);
document.title = "OK";
reportTestSuccess();
});
negotiate();
......@@ -658,7 +652,7 @@
function onOfferCreated(offer, caller, callee) {
offer.sdp = maybeForceIsac16K(transformSdp(offer.sdp));
caller.setLocalDescription(offer, function() {
expectEquals('have-local-offer', caller.signalingState);
assertEquals('have-local-offer', caller.signalingState);
receiveOffer(offer.sdp, caller, callee);
}, onLocalDescriptionError);
}
......@@ -673,7 +667,7 @@
callee.createAnswer(function (answer) {
onAnswerCreated(answer, caller, callee);
});
expectEquals('have-remote-offer', callee.signalingState);
assertEquals('have-remote-offer', callee.signalingState);
}
function removeMsid(offerSdp) {
......@@ -703,7 +697,7 @@
'b=AS:512\r\n');
return offerSdp;
}
// Strips every BUNDLE group attribute line from the given SDP blob.
function removeBundle(sdp) {
  var bundleAttributePattern = /a=group:BUNDLE .*\r\n/g;
  return sdp.replace(bundleAttributePattern, '');
}
......@@ -733,7 +727,7 @@
function onAnswerCreated(answer, caller, callee) {
answer.sdp = maybeForceIsac16K(transformSdp(answer.sdp));
callee.setLocalDescription(answer);
expectEquals('stable', callee.signalingState);
assertEquals('stable', callee.signalingState);
receiveAnswer(answer.sdp, caller);
}
......@@ -743,7 +737,7 @@
var parsedAnswer = new RTCSessionDescription({ type: 'answer',
sdp: answerSdp });
caller.setRemoteDescription(parsedAnswer);
expectEquals('stable', caller.signalingState);
assertEquals('stable', caller.signalingState);
}
function connectOnIceCandidate(caller, callee) {
......@@ -768,9 +762,8 @@
function onRemoteStream(e, target) {
console.log("Receiving remote stream...");
if (gTestWithoutMsid && e.stream.id != "default") {
document.title = 'a default remote stream was expected but instead ' +
e.stream.id + ' was received.';
return;
failTest('a default remote stream was expected but instead ' +
e.stream.id + ' was received.');
}
gRemoteStreams[target] = e.stream;
var remoteStreamUrl = URL.createObjectURL(e.stream);
......
......@@ -70,7 +70,7 @@ function getAudioLevelFromStats_(response) {
}
}
// Should only be one audio level reported, otherwise we get confused.
expectEquals(1, audioOutputLevels.length);
assertEquals(1, audioOutputLevels.length);
return audioOutputLevels[0];
}
......@@ -24,6 +24,23 @@ function setAllEventsOccuredHandler(handler) {
gAllEventsOccured = handler;
}
// Tells the C++ code we succeeded, which will generally exit the test.
// Signals success to the C++ side by sending the sentinel string 'OK'.
function reportTestSuccess() {
  var automationController = window.domAutomationController;
  automationController.send('OK');
}
// Sends a custom value back to the test.
// Forwards an arbitrary value to the waiting C++ test.
function sendValueToTest(value) {
  var automationController = window.domAutomationController;
  automationController.send(value);
}
// Immediately fails the test on the C++ side and throws an exception to
// stop execution on the javascript side.
// Fails the test on the C++ side by sending a stack trace. The Error is
// constructed here so the trace points at the failTest call site; throwing
// it out of the Error constructor's scope is not needed to capture it.
function failTest(reason) {
  var failure = new Error(reason);
  window.domAutomationController.send(failure.stack);
}
// Invokes |callback| once the video element named |videoElementName| is
// detected to be playing; delegates to detectVideo with the isVideoPlaying
// predicate.
function detectVideoPlaying(videoElementName, callback) {
  detectVideo(videoElementName, isVideoPlaying, callback);
}
......@@ -62,13 +79,11 @@ function detectVideo(videoElementName, predicate, callback) {
}
function waitForVideo(videoElement) {
document.title = 'Waiting for video...';
addExpectedEvent();
detectVideoPlaying(videoElement, function () { eventOccured(); });
}
function waitForVideoToStop(videoElement) {
document.title = 'Waiting for video to stop...';
addExpectedEvent();
detectVideoStopped(videoElement, function () { eventOccured(); });
}
......@@ -82,10 +97,15 @@ function waitForConnectionToStabilize(peerConnection, callback) {
}, 100);
}
// Adds an expected event. You may call this function many times to add more
// expected events. Each expected event must later be matched by a call to
// eventOccured. When enough events have occurred, the "all events occurred
// handler" will be called.
// Registers one more event that must happen (via eventOccured) before the
// all-events-occurred handler may run.
function addExpectedEvent() {
  gNumberOfExpectedEvents += 1;
}
// See addExpectedEvent.
function eventOccured() {
++gNumberOfEvents;
if (gNumberOfEvents == gNumberOfExpectedEvents) {
......@@ -104,99 +124,11 @@ function isVideoPlaying(pixels, previousPixels) {
return false;
}
// This function matches |left| and |right| and throws an exception if the
// values don't match.
function expectEquals(left, right) {
if (left != right) {
var s = "expectEquals failed left: " + left + " right: " + right;
document.title = s;
throw s;
// This function compares |expected| and |actual| and fails the test if the
// values don't match using normal javascript equality (i.e. the hard
// types of the operands aren't checked).
// Fails the test unless |actual| equals |expected| under loose (==)
// javascript equality; operand types are deliberately not checked.
function assertEquals(expected, actual) {
  if (actual == expected)
    return;
  failTest("expected '" + expected + "', got '" + actual + "'.");
}
// This function tries to calculate the aspect ratio shown by the fake capture
// device in the video tag. For this, we count the amount of light green pixels
// along |aperture| pixels on the positive X and Y axis starting from the
// center of the image. In this very center there should be a time-varying
// pacman; the algorithm counts for a couple of iterations and keeps the
// maximum amount of light green pixels on both directions. From this data
// the aspect ratio is calculated relative to a 320x240 window, so 4:3 would
// show as a 1. Furthermore, since an original non-4:3 might be letterboxed or
// cropped, the actual X and Y pixel amounts are compared with the fake video
// capture expected pacman radius (see further below).
// |callback| is invoked exactly once with the detected aspect ratio string.
function detectAspectRatio(callback) {
  var width = VIDEO_TAG_WIDTH;
  var height = VIDEO_TAG_HEIGHT;
  var videoElement = $('local-view');
  var canvas = $('local-view-canvas');
  var maxLightGreenPixelsX = 0;
  var maxLightGreenPixelsY = 0;
  var aperture = Math.min(width, height) / 2;
  var iterations = 0;
  var maxIterations = 10;
  var waitVideo = setInterval(function() {
    var context = canvas.getContext('2d');
    context.drawImage(videoElement, 0, 0, width, height);
    // We are interested in a window starting from the center of the image
    // where we expect the circle from the fake video capture to be rolling.
    var pixels =
        context.getImageData(width / 2, height / 2, aperture, aperture);
    var lightGreenPixelsX = 0;
    var lightGreenPixelsY = 0;
    // Walk horizontally counting light green pixels.
    for (var x = 0; x < aperture; ++x) {
      if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
        lightGreenPixelsX++;
    }
    // Walk vertically counting light green pixels. Use the named constant
    // here as well; the previous hard-coded 135 had to stay manually in sync
    // with COLOR_BACKGROUND_GREEN used in the horizontal walk above.
    for (var y = 0; y < aperture; ++y) {
      if (pixels.data[4 * y * aperture + 1] != COLOR_BACKGROUND_GREEN)
        lightGreenPixelsY++;
    }
    if (lightGreenPixelsX > maxLightGreenPixelsX &&
        lightGreenPixelsX < aperture)
      maxLightGreenPixelsX = lightGreenPixelsX;
    if (lightGreenPixelsY > maxLightGreenPixelsY &&
        lightGreenPixelsY < aperture)
      maxLightGreenPixelsY = lightGreenPixelsY;
    var detectedAspectRatioString = "";
    if (++iterations > maxIterations) {
      clearInterval(waitVideo);
      // Declared with |var|: the original assigned an undeclared variable,
      // silently leaking an implicit global.
      var observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX;
      // At this point the observed aspect ratio is either 1, for undistorted
      // 4:3, or some other aspect ratio that is seen as distorted.
      if (Math.abs(observedAspectRatio - 1.333) < 0.1)
        detectedAspectRatioString = "16:9";
      else if (Math.abs(observedAspectRatio - 1.20) < 0.1)
        detectedAspectRatioString = "16:10";
      else if (Math.abs(observedAspectRatio - 1.0) < 0.1)
        detectedAspectRatioString = "4:3";
      else
        detectedAspectRatioString = "UNKNOWN aspect ratio";
      console.log(detectedAspectRatioString + " observed aspect ratio (" +
                  observedAspectRatio + ")");
      // The FakeVideoCapture calculates the circle radius as
      // std::min(capture_format_.width, capture_format_.height) / 4;
      // we do the same and see if both dimensions are scaled, meaning
      // we started from a cropped or stretched image.
      var nonDistortedRadius = Math.min(width, height) / 4;
      if ((maxLightGreenPixelsX != nonDistortedRadius) &&
          (maxLightGreenPixelsY != nonDistortedRadius)) {
        detectedAspectRatioString += " cropped";
      } else
        detectedAspectRatioString += " letterbox";
      console.log("Original image is: " + detectedAspectRatioString);
      callback(detectedAspectRatioString);
    }
  },
  50);
}
......@@ -33,11 +33,6 @@ void WebRtcContentBrowserTest::SetUp() {
ContentBrowserTest::SetUp();
}
// Runs |javascript| in the shell's web contents and forwards ExecuteScript's
// success value; the script's own result, if any, is ignored.
bool WebRtcContentBrowserTest::ExecuteJavascript(
    const std::string& javascript) {
  const bool executed = ExecuteScript(shell()->web_contents(), javascript);
  return executed;
}
// Executes |javascript|. The script is required to use
// window.domAutomationController.send to send a string value back to here.
std::string WebRtcContentBrowserTest::ExecuteJavascriptAndReturnResult(
......@@ -49,12 +44,14 @@ std::string WebRtcContentBrowserTest::ExecuteJavascriptAndReturnResult(
return result;
}
void WebRtcContentBrowserTest::ExpectTitle(
const std::string& expected_title) const {
base::string16 expected_title16(base::ASCIIToUTF16(expected_title));
TitleWatcher title_watcher(shell()->web_contents(), expected_title16);
EXPECT_EQ(expected_title16, title_watcher.WaitAndGetTitle());
}
// Runs |javascript| and waits for it to report completion through the DOM
// automation controller. The script must eventually send the string "OK";
// any other value fails the current test.
void WebRtcContentBrowserTest::ExecuteJavascriptAndWaitForOk(
    const std::string& javascript) {
  std::string result = ExecuteJavascriptAndReturnResult(javascript);
  if (result != "OK") {
    // Stream the javascript's message into the gtest failure instead of
    // printf-ing it: the message then appears in the failure record (and
    // test summary) rather than only interleaved in raw stdout, and the
    // original printf also lacked a trailing newline.
    FAIL() << "From javascript: " << result;
  }
}
std::string WebRtcContentBrowserTest::GenerateGetUserMediaCall(
const char* function_name,
......
......@@ -15,16 +15,14 @@ class WebRtcContentBrowserTest: public ContentBrowserTest {
virtual void SetUp() OVERRIDE;
protected:
// Executes |javascript| and returns after it has been executed.
bool ExecuteJavascript(const std::string& javascript);
// Executes |javascript|. The script is required to use
// window.domAutomationController.send to send a string value back to here.
std::string ExecuteJavascriptAndReturnResult(
const std::string& javascript);
// Waits for the page title to be set to |expected_title|.
void ExpectTitle(const std::string& expected_title) const;
// Waits for the javascript to return OK via the automation controller.
// If the javascript returns != OK or times out, we fail the test.
void ExecuteJavascriptAndWaitForOk(const std::string& javascript);
// Generates javascript code for a getUserMedia call.
std::string GenerateGetUserMediaCall(const char* function_name,
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment