Commit 43ebe9ea authored by phoglund@chromium.org

Refactored how WebRTC content browser tests talk to their javascript.

We will now use the domAutomationController rather than looking at the
page title. This will confer the following advantages:

- Tests will fail right away when the js detects an error, rather than
  timing out.
- We will get much better stack traces when errors do occur.
- The communication path to the tests becomes more flexible, with the
  possibility of custom return values (for instance, if we need to verify
  something in the C++ code or run a multi-stage test); see the sketch below.
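
A rough sketch of the new flow, pieced together from the test and helpers in
this patch (the exact javascript entry point varies per test):

  // C++ side: run the page's test function and fail fast unless it reports OK.
  // The page signals completion by calling reportTestSuccess(), which sends
  // 'OK' through window.domAutomationController, or fails via failTest().
  IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest, GetVideoStreamAndStop) {
    GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
    NavigateToURL(shell(), url);
    // Returns as soon as the javascript sends "OK"; any other value (such as
    // the error stack sent by failTest()) fails the test immediately.
    ExecuteJavascriptAndWaitForOk("getUserMediaAndStop({video: true});");
  }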

BUG=None
NOTRY=True

(Doing NOTRY since the CQ gets stuck on a telemetry test on mac_rel.)

Review URL: https://codereview.chromium.org/190563002

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@255795 0039d316-1c4b-4281-b951-d872f2087c98
parent 35c55662
@@ -67,12 +67,12 @@ IN_PROC_BROWSER_TEST_F(WebRtcAecDumpBrowserTest, MAYBE_CallWithAecDump) {
   NavigateToURL(shell(), url);
 
 #if defined (OS_ANDROID)
   // Always force iSAC 16K on Android for now (Opus is broken).
-  ASSERT_TRUE(ExecuteJavascript("forceIsac16KInSdp();"));
+  EXPECT_EQ("isac-forced",
+            ExecuteJavascriptAndReturnResult("forceIsac16KInSdp();"));
 #endif
 
-  EXPECT_TRUE(ExecuteJavascript("call({video: true, audio: true});"));
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
 
   EXPECT_TRUE(base::PathExists(dump_file_));
   int64 file_size = 0;
...
@@ -34,17 +34,17 @@ class WebRtcBrowserTest : public WebRtcContentBrowserTest {
     GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
     NavigateToURL(shell(), url);
 
-    ExecuteTestAndWaitForOk(javascript);
+    DisableOpusIfOnAndroid();
+    ExecuteJavascriptAndWaitForOk(javascript);
   }
 
-  void ExecuteTestAndWaitForOk(const std::string& javascript) {
+  void DisableOpusIfOnAndroid() {
 #if defined (OS_ANDROID)
     // Always force iSAC 16K on Android for now (Opus is broken).
-    ASSERT_TRUE(ExecuteJavascript("forceIsac16KInSdp();"));
+    EXPECT_EQ("isac-forced",
+              ExecuteJavascriptAndReturnResult("forceIsac16KInSdp();"));
 #endif
-
-    ASSERT_TRUE(ExecuteJavascript(javascript));
-    ExpectTitle("OK");
   }
 };
@@ -331,7 +331,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest, MAYBE_CallWithAecDump) {
   GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
   NavigateToURL(shell(), url);
 
-  ExecuteTestAndWaitForOk("call({video: true, audio: true});");
+  DisableOpusIfOnAndroid();
+  ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
 
   EXPECT_TRUE(base::PathExists(dump_file));
   int64 file_size = 0;
@@ -367,7 +368,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcBrowserTest,
   GURL url(embedded_test_server()->GetURL("/media/peerconnection-call.html"));
   NavigateToURL(shell(), url);
 
-  ExecuteTestAndWaitForOk("call({video: true, audio: true});");
+  DisableOpusIfOnAndroid();
+  ExecuteJavascriptAndWaitForOk("call({video: true, audio: true});");
 
   EXPECT_TRUE(base::PathExists(dump_file));
   int64 file_size = 0;
...
@@ -28,14 +28,14 @@ using trace_analyzer::TraceEventVector;
 namespace {
 
 static const char kGetUserMediaAndStop[] = "getUserMediaAndStop";
-static const char kGetUserMediaAndWaitAndStop[] = "getUserMediaAndWaitAndStop";
+static const char kGetUserMediaAndGetStreamUp[] = "getUserMediaAndGetStreamUp";
 static const char kGetUserMediaAndAnalyseAndStop[] =
     "getUserMediaAndAnalyseAndStop";
+static const char kGetUserMediaAndExpectFailure[] =
+    "getUserMediaAndExpectFailure";
 
 // Results returned by JS.
 static const char kOK[] = "OK";
-static const char kGetUserMediaFailed[] =
-    "GetUserMedia call failed with code undefined";
 
 std::string GenerateGetUserMediaWithMandatorySourceID(
     const std::string& function_name,
@@ -109,20 +109,23 @@ class WebRtcGetUserMediaBrowserTest: public WebRtcContentBrowserTest {
     GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
     NavigateToURL(shell(), url);
 
     // Put getUserMedia to work and let it run for a couple of seconds.
     DCHECK(time_to_sample_secs);
-    ASSERT_TRUE(
-        ExecuteJavascript(base::StringPrintf("%s({video: true}, %d);",
-                                             kGetUserMediaAndWaitAndStop,
-                                             time_to_sample_secs)));
+    ASSERT_EQ("ok-stream-running",
+              ExecuteJavascriptAndReturnResult(
+                  base::StringPrintf("%s({video: true});",
+                                     kGetUserMediaAndGetStreamUp)));
 
-    // Make sure the stream is up and running, then start collecting traces.
-    ExpectTitle("Running...");
+    // Now the stream is up and running, start collecting traces.
    StartTracing();
 
+    // Let the stream run for a while in javascript.
+    ExecuteJavascriptAndWaitForOk(
+        base::StringPrintf("waitAndStopVideoTrack(%d);", time_to_sample_secs));
+
     // Wait until the page title changes to "OK". Do not sleep() here since that
     // would stop both this code and the browser underneath.
-    ExpectTitle("OK");
     StopTracing();
 
     scoped_ptr<TraceAnalyzer> analyzer(CreateTraceAnalyzer());
@@ -214,10 +217,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest, GetVideoStreamAndStop) {
   GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript(
-      base::StringPrintf("%s({video: true});", kGetUserMediaAndStop)));
-
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk(
+      base::StringPrintf("%s({video: true});", kGetUserMediaAndStop));
 }
 
 IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
@@ -227,10 +228,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
   GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript(base::StringPrintf(
-      "%s({video: true, audio: true});", kGetUserMediaAndStop)));
-
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk(base::StringPrintf(
      "%s({video: true, audio: true});", kGetUserMediaAndStop));
 }
 
 IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
@@ -240,9 +239,7 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
   GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript("getUserMediaAndClone();"));
-
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk("getUserMediaAndClone();");
 }
 
 IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
@@ -282,25 +279,25 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
   // Test with invalid mandatory audio sourceID.
   NavigateToURL(shell(), url);
-  EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
-      GenerateGetUserMediaWithMandatorySourceID(
-          kGetUserMediaAndStop,
-          "something invalid",
-          video_ids[0])));
+  ExecuteJavascriptAndWaitForOk(
+      GenerateGetUserMediaWithMandatorySourceID(
+          kGetUserMediaAndExpectFailure,
+          "something invalid",
+          video_ids[0]));
 
   // Test with invalid mandatory video sourceID.
-  EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
-      GenerateGetUserMediaWithMandatorySourceID(
-          kGetUserMediaAndStop,
-          audio_ids[0],
-          "something invalid")));
+  ExecuteJavascriptAndWaitForOk(
+      GenerateGetUserMediaWithMandatorySourceID(
+          kGetUserMediaAndExpectFailure,
+          audio_ids[0],
+          "something invalid"));
 
   // Test with empty mandatory audio sourceID.
-  EXPECT_EQ(kGetUserMediaFailed, ExecuteJavascriptAndReturnResult(
-      GenerateGetUserMediaWithMandatorySourceID(
-          kGetUserMediaAndStop,
-          "",
-          video_ids[0])));
+  ExecuteJavascriptAndWaitForOk(
+      GenerateGetUserMediaWithMandatorySourceID(
+          kGetUserMediaAndExpectFailure,
+          "",
+          video_ids[0]));
 }
 
 IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
@@ -342,10 +339,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest, TwoGetUserMediaAndStop) {
   GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript(
-      "twoGetUserMediaAndStop({video: true, audio: true});"));
-
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk(
+      "twoGetUserMediaAndStop({video: true, audio: true});");
 }
 
 // This test will make a simple getUserMedia page, verify that video is playing
@@ -388,8 +383,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
   // TODO(mcasas): add more aspect ratios, in particular 16:10 crbug.com/275594.
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript(constraints_4_3));
-  ExpectTitle("4:3 letterbox");
+  ASSERT_EQ("4:3 letterbox",
+            ExecuteJavascriptAndReturnResult(constraints_4_3));
 }
 
 // This test calls getUserMedia and checks for aspect ratio behavior.
@@ -406,8 +401,8 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
       kGetUserMediaAndAnalyseAndStop, 640, 640, 360, 360, 30, 30);
   NavigateToURL(shell(), url);
 
-  ASSERT_TRUE(ExecuteJavascript(constraints_16_9));
-  ExpectTitle("16:9 letterbox");
+  ASSERT_EQ("16:9 letterbox",
+            ExecuteJavascriptAndReturnResult(constraints_16_9));
 }
 
 namespace {
@@ -449,8 +444,7 @@ IN_PROC_BROWSER_TEST_P(WebRtcConstraintsBrowserTest, GetUserMediaConstraints) {
                           user_media().max_frame_rate);
   DVLOG(1) << "Calling getUserMedia: " << call;
   NavigateToURL(shell(), url);
-  ASSERT_TRUE(ExecuteJavascript(call));
-  ExpectTitle("OK");
+  ExecuteJavascriptAndWaitForOk(call);
 }
 
 static const UserMediaSizes kAllUserMediaSizes[] = {
...
@@ -10,15 +10,9 @@
   setAllEventsOccuredHandler(function() {
     gLocalStream.stop();
-    document.title = 'OK';
-    sendValueToTest(document.title);
+    reportTestSuccess();
   });
 
-  function sendValueToTest(value) {
-    window.domAutomationController.setAutomationId(0);
-    window.domAutomationController.send(value);
-  }
-
   function getSources() {
     MediaStreamTrack.getSources(function(devices) {
       document.title = 'Sources Available';
@@ -29,30 +23,42 @@
   // Creates a MediaStream and renders it locally. When the video is detected to
   // be rolling, the title is changed and the stream should be stopped.
   function getUserMediaAndStop(constraints) {
-    document.title = 'Calling GetUserMedia';
+    console.log('Calling getUserMediaAndStop.');
     navigator.webkitGetUserMedia(
         constraints,
         function(stream) { displayAndDetectVideo(stream, stopVideoTrack); },
         failedCallback);
   }
 
+  // Requests getusermedia and expects it to fail.
+  function getUserMediaAndExpectFailure(constraints) {
+    console.log('Calling getUserMediaAndExpectFailure.');
+    navigator.webkitGetUserMedia(
+        constraints,
+        function(stream) { failTest('Unexpectedly succeeded getUserMedia.'); },
+        function(error) { reportTestSuccess(); });
+  }
+
   // Creates a MediaStream and renders it locally. When the video is detected to
-  // be rolling, the title should be changed and the stream is let roll for a
-  // number |waitTimeInSeconds| and then it should be stopped.
-  function getUserMediaAndWaitAndStop(constraints, waitTimeInSeconds) {
+  // be rolling we return ok-stream-running through the automation controller.
+  function getUserMediaAndGetStreamUp(constraints, waitTimeInSeconds) {
+    console.log('Calling getUserMediaAndGetStreamUp.');
     navigator.webkitGetUserMedia(
         constraints,
         function(stream) {
          displayAndDetectVideo(
            stream,
            function() {
-             waitAndStopVideoTrack(waitTimeInSeconds);
+             sendValueToTest('ok-stream-running');
            });
        },
        failedCallback);
  }
 
+  // Gets a video stream up, analyses it and returns the aspect ratio to the
+  // test through the automation controller.
   function getUserMediaAndAnalyseAndStop(constraints) {
+    console.log('Calling getUserMediaAndAnalyseAndStop.');
     navigator.webkitGetUserMedia(
         constraints, displayDetectAndAnalyzeVideo, failedCallback);
   }
@@ -60,6 +66,7 @@
   // This test that a MediaStream can be cloned and that the clone can
   // be rendered.
   function getUserMediaAndClone() {
+    console.log('Calling getUserMediaAndClone.');
     navigator.webkitGetUserMedia({video: true, audio: true},
         createAndRenderClone, failedCallback);
   }
@@ -69,36 +76,35 @@
   // streams have the same source, both video streams should stop. If they do,
   // the test succeeds.
   function twoGetUserMediaAndStop(constraints) {
-    document.title = 'Calling Two GetUserMedia';
+    console.log('Calling Two GetUserMedia');
     navigator.webkitGetUserMedia(
         constraints,
         function(stream) {
          displayAndDetectVideo(stream, requestSecondGetUserMedia);
        },
        failedCallback);
    var requestSecondGetUserMedia = function() {
      navigator.webkitGetUserMedia(
          constraints,
          function(stream) {
            displayIntoVideoElement(stream,
                stopStreamAndVerifyAllLocalViewsDontPlayVideo, 'local-view-2');
          },
          failedCallback);
    };
    var stopStreamAndVerifyAllLocalViewsDontPlayVideo = function() {
      gLocalStream.getVideoTracks()[0].stop();
      // Since local-view and local-view-2 are playing the video from the same
      // source, both of them should stop.
      waitForVideoToStop('local-view');
      waitForVideoToStop('local-view-2');
    };
  }
 
   function failedCallback(error) {
-    document.title = 'GetUserMedia call failed with code ' + error.code;
-    sendValueToTest(document.title);
+    failTest('GetUserMedia call failed with code ' + error.code);
   }
 
   function plugStreamIntoVideoElement(stream, videoElement) {
@@ -109,7 +115,6 @@
   function displayIntoVideoElement(stream, callback, videoElement) {
     plugStreamIntoVideoElement(stream, videoElement);
-    document.title = 'Waiting for video...';
     detectVideoPlaying(videoElement, callback);
   }
@@ -128,12 +133,12 @@
     // work with audio devices and not all bots has a microphone.
     new_stream = new webkitMediaStream();
     new_stream.addTrack(stream.getVideoTracks()[0]);
-    expectEquals(new_stream.getVideoTracks().length, 1);
+    assertEquals(new_stream.getVideoTracks().length, 1);
     if (stream.getAudioTracks().length > 0) {
       new_stream.addTrack(stream.getAudioTracks()[0]);
-      expectEquals(new_stream.getAudioTracks().length, 1);
+      assertEquals(new_stream.getAudioTracks().length, 1);
       new_stream.removeTrack(new_stream.getAudioTracks()[0]);
-      expectEquals(new_stream.getAudioTracks().length, 0);
+      assertEquals(new_stream.getAudioTracks().length, 0);
    }
 
    var newStreamUrl = URL.createObjectURL(new_stream);
@@ -147,19 +152,102 @@
   }
 
   function waitAndStopVideoTrack(waitTimeInSeconds) {
-    document.title = 'Running...';
     setTimeout(stopVideoTrack, waitTimeInSeconds * 1000);
   }
 
   function analyzeVideo() {
-    document.title = 'Waiting for video...';
-    addExpectedEvent();
     detectAspectRatio(function(aspectRatio) {
-      document.title = aspectRatio;
-      eventOccured();
+      sendValueToTest(aspectRatio);
     });
   }
 
+  // This function tries to calculate the aspect ratio shown by the fake capture
+  // device in the video tag. For this, we count the amount of light green
+  // pixels along |aperture| pixels on the positive X and Y axis starting from
+  // the center of the image. In this very center there should be a time-varying
+  // pacman; the algorithm counts for a couple of iterations and keeps the
+  // maximum amount of light green pixels on both directions. From this data
+  // the aspect ratio is calculated relative to a 320x240 window, so 4:3 would
+  // show as a 1. Furthermore, since an original non-4:3 might be letterboxed or
+  // cropped, the actual X and Y pixel amounts are compared with the fake video
+  // capture expected pacman radius (see further below).
+  function detectAspectRatio(callback) {
+    var width = VIDEO_TAG_WIDTH;
+    var height = VIDEO_TAG_HEIGHT;
+    var videoElement = $('local-view');
+    var canvas = $('local-view-canvas');
+    var maxLightGreenPixelsX = 0;
+    var maxLightGreenPixelsY = 0;
+
+    var aperture = Math.min(width, height) / 2;
+    var iterations = 0;
+    var maxIterations = 10;
+
+    var detectorFunction = function() {
+      var context = canvas.getContext('2d');
+      context.drawImage(videoElement, 0, 0, width, height);
+
+      // We are interested in a window starting from the center of the image
+      // where we expect the circle from the fake video capture to be rolling.
+      var pixels = context.getImageData(width / 2, height / 2,
+                                        aperture, aperture);
+
+      var lightGreenPixelsX = 0;
+      var lightGreenPixelsY = 0;
+
+      // Walk horizontally counting light green pixels.
+      for (var x = 0; x < aperture; ++x) {
+        if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
+          lightGreenPixelsX++;
+      }
+      // Walk vertically counting light green pixels.
+      for (var y = 0; y < aperture; ++y) {
+        if (pixels.data[4 * y * aperture + 1] != 135)
+          lightGreenPixelsY++;
+      }
+      if (lightGreenPixelsX > maxLightGreenPixelsX &&
+          lightGreenPixelsX < aperture)
+        maxLightGreenPixelsX = lightGreenPixelsX;
+      if (lightGreenPixelsY > maxLightGreenPixelsY &&
+          lightGreenPixelsY < aperture)
+        maxLightGreenPixelsY = lightGreenPixelsY;
+
+      var detectedAspectRatioString = "";
+      if (++iterations > maxIterations) {
+        clearInterval(detectorFunction);
+        observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX;
+        // At this point the observed aspect ratio is either 1, for undistorted
+        // 4:3, or some other aspect ratio that is seen as distorted.
+        if (Math.abs(observedAspectRatio - 1.333) < 0.1)
+          detectedAspectRatioString = "16:9";
+        else if (Math.abs(observedAspectRatio - 1.20) < 0.1)
+          detectedAspectRatioString = "16:10";
+        else if (Math.abs(observedAspectRatio - 1.0) < 0.1)
+          detectedAspectRatioString = "4:3";
+        else
+          detectedAspectRatioString = "UNKNOWN aspect ratio";
+        console.log(detectedAspectRatioString + " observed aspect ratio (" +
+                    observedAspectRatio + ")");
+
+        // The FakeVideoCapture calculates the circle radius as
+        // std::min(capture_format_.width, capture_format_.height) / 4;
+        // we do the same and see if both dimensions are scaled, meaning
+        // we started from a cropped or stretched image.
+        var nonDistortedRadius = Math.min(width, height) / 4;
+        if ((maxLightGreenPixelsX != nonDistortedRadius) &&
+            (maxLightGreenPixelsY != nonDistortedRadius)) {
+          detectedAspectRatioString += " cropped";
+        } else
+          detectedAspectRatioString += " letterbox";
+
+        console.log("Original image is: " + detectedAspectRatioString);
+        callback(detectedAspectRatioString);
+      }
+    }
+    setInterval(detectorFunction, 50);
+  }
+
   </script>
 </head>
 <body>
...
@@ -70,7 +70,7 @@ function getAudioLevelFromStats_(response) {
     }
   }
   // Should only be one audio level reported, otherwise we get confused.
-  expectEquals(1, audioOutputLevels.length);
+  assertEquals(1, audioOutputLevels.length);
   return audioOutputLevels[0];
 }
...
@@ -24,6 +24,23 @@ function setAllEventsOccuredHandler(handler) {
   gAllEventsOccured = handler;
 }
 
+// Tells the C++ code we succeeded, which will generally exit the test.
+function reportTestSuccess() {
+  window.domAutomationController.send('OK');
+}
+
+// Returns a custom return value to the test.
+function sendValueToTest(value) {
+  window.domAutomationController.send(value);
+}
+
+// Immediately fails the test on the C++ side and throw an exception to
+// stop execution on the javascript side.
+function failTest(reason) {
+  var error = new Error(reason);
+  window.domAutomationController.send(error.stack);
+}
+
 function detectVideoPlaying(videoElementName, callback) {
   detectVideo(videoElementName, isVideoPlaying, callback);
 }
@@ -62,13 +79,11 @@ function detectVideo(videoElementName, predicate, callback) {
 }
 
 function waitForVideo(videoElement) {
-  document.title = 'Waiting for video...';
   addExpectedEvent();
   detectVideoPlaying(videoElement, function () { eventOccured(); });
 }
 
 function waitForVideoToStop(videoElement) {
-  document.title = 'Waiting for video to stop...';
   addExpectedEvent();
   detectVideoStopped(videoElement, function () { eventOccured(); });
 }
@@ -82,10 +97,15 @@ function waitForConnectionToStabilize(peerConnection, callback) {
   }, 100);
 }
 
+// Adds an expected event. You may call this function many times to add more
+// expected events. Each expected event must later be matched by a call to
+// eventOccurred. When enough events have occurred, the "all events occurred
+// handler" will be called.
 function addExpectedEvent() {
   ++gNumberOfExpectedEvents;
 }
 
+// See addExpectedEvent.
 function eventOccured() {
   ++gNumberOfEvents;
   if (gNumberOfEvents == gNumberOfExpectedEvents) {
@@ -104,99 +124,11 @@ function isVideoPlaying(pixels, previousPixels) {
   return false;
 }
 
-// This function matches |left| and |right| and throws an exception if the
-// values don't match.
-function expectEquals(left, right) {
-  if (left != right) {
-    var s = "expectEquals failed left: " + left + " right: " + right;
-    document.title = s;
-    throw s;
+// This function matches |left| and |right| and fails the test if the
+// values don't match using normal javascript equality (i.e. the hard
+// types of the operands aren't checked).
+function assertEquals(expected, actual) {
+  if (actual != expected) {
+    failTest("expected '" + expected + "', got '" + actual + "'.");
   }
 }
-
-// This function tries to calculate the aspect ratio shown by the fake capture
-// device in the video tag. For this, we count the amount of light green pixels
-// along |aperture| pixels on the positive X and Y axis starting from the
-// center of the image. In this very center there should be a time-varying
-// pacman; the algorithm counts for a couple of iterations and keeps the
-// maximum amount of light green pixels on both directions. From this data
-// the aspect ratio is calculated relative to a 320x240 window, so 4:3 would
-// show as a 1. Furthermore, since an original non-4:3 might be letterboxed or
-// cropped, the actual X and Y pixel amounts are compared with the fake video
-// capture expected pacman radius (see further below).
-function detectAspectRatio(callback) {
-  var width = VIDEO_TAG_WIDTH;
-  var height = VIDEO_TAG_HEIGHT;
-  var videoElement = $('local-view');
-  var canvas = $('local-view-canvas');
-  var maxLightGreenPixelsX = 0;
-  var maxLightGreenPixelsY = 0;
-
-  var aperture = Math.min(width, height) / 2;
-  var iterations = 0;
-  var maxIterations = 10;
-
-  var waitVideo = setInterval(function() {
-    var context = canvas.getContext('2d');
-    context.drawImage(videoElement, 0, 0, width, height);
-
-    // We are interested in a window starting from the center of the image
-    // where we expect the circle from the fake video capture to be rolling.
-    var pixels =
-        context.getImageData(width / 2, height / 2, aperture, aperture);
-
-    var lightGreenPixelsX = 0;
-    var lightGreenPixelsY = 0;
-
-    // Walk horizontally counting light green pixels.
-    for (var x = 0; x < aperture; ++x) {
-      if (pixels.data[4 * x + 1] != COLOR_BACKGROUND_GREEN)
-        lightGreenPixelsX++;
-    }
-    // Walk vertically counting light green pixels.
-    for (var y = 0; y < aperture; ++y) {
-      if (pixels.data[4 * y * aperture + 1] != 135)
-        lightGreenPixelsY++;
-    }
-    if (lightGreenPixelsX > maxLightGreenPixelsX &&
-        lightGreenPixelsX < aperture)
-      maxLightGreenPixelsX = lightGreenPixelsX;
-    if (lightGreenPixelsY > maxLightGreenPixelsY &&
-        lightGreenPixelsY < aperture)
-      maxLightGreenPixelsY = lightGreenPixelsY;
-
-    var detectedAspectRatioString = "";
-    if (++iterations > maxIterations) {
-      clearInterval(waitVideo);
-      observedAspectRatio = maxLightGreenPixelsY / maxLightGreenPixelsX;
-      // At this point the observed aspect ratio is either 1, for undistorted
-      // 4:3, or some other aspect ratio that is seen as distorted.
-      if (Math.abs(observedAspectRatio - 1.333) < 0.1)
-        detectedAspectRatioString = "16:9";
-      else if (Math.abs(observedAspectRatio - 1.20) < 0.1)
-        detectedAspectRatioString = "16:10";
-      else if (Math.abs(observedAspectRatio - 1.0) < 0.1)
-        detectedAspectRatioString = "4:3";
-      else
-        detectedAspectRatioString = "UNKNOWN aspect ratio";
-      console.log(detectedAspectRatioString + " observed aspect ratio (" +
-                  observedAspectRatio + ")");
-
-      // The FakeVideoCapture calculates the circle radius as
-      // std::min(capture_format_.width, capture_format_.height) / 4;
-      // we do the same and see if both dimensions are scaled, meaning
-      // we started from a cropped or stretched image.
-      var nonDistortedRadius = Math.min(width, height) / 4;
-      if ((maxLightGreenPixelsX != nonDistortedRadius) &&
-          (maxLightGreenPixelsY != nonDistortedRadius)) {
-        detectedAspectRatioString += " cropped";
-      } else
-        detectedAspectRatioString += " letterbox";
-
-      console.log("Original image is: " + detectedAspectRatioString);
-      callback(detectedAspectRatioString);
-    }
-  },
-  50);
-}
...
@@ -33,11 +33,6 @@ void WebRtcContentBrowserTest::SetUp() {
   ContentBrowserTest::SetUp();
 }
 
-bool WebRtcContentBrowserTest::ExecuteJavascript(
-    const std::string& javascript) {
-  return ExecuteScript(shell()->web_contents(), javascript);
-}
-
 // Executes |javascript|. The script is required to use
 // window.domAutomationController.send to send a string value back to here.
 std::string WebRtcContentBrowserTest::ExecuteJavascriptAndReturnResult(
@@ -49,12 +44,14 @@ std::string WebRtcContentBrowserTest::ExecuteJavascriptAndReturnResult(
   return result;
 }
 
-void WebRtcContentBrowserTest::ExpectTitle(
-    const std::string& expected_title) const {
-  base::string16 expected_title16(base::ASCIIToUTF16(expected_title));
-  TitleWatcher title_watcher(shell()->web_contents(), expected_title16);
-  EXPECT_EQ(expected_title16, title_watcher.WaitAndGetTitle());
+void WebRtcContentBrowserTest::ExecuteJavascriptAndWaitForOk(
+    const std::string& javascript) {
+  std::string result = ExecuteJavascriptAndReturnResult(javascript);
+  if (result != "OK") {
+    printf("From javascript: %s", result.c_str());
+    FAIL();
+  }
 }
 
 std::string WebRtcContentBrowserTest::GenerateGetUserMediaCall(
     const char* function_name,
...
@@ -15,16 +15,14 @@ class WebRtcContentBrowserTest: public ContentBrowserTest {
   virtual void SetUp() OVERRIDE;
 
  protected:
-  // Executes |javascript| and returns after it has been executed.
-  bool ExecuteJavascript(const std::string& javascript);
-
   // Executes |javascript|. The script is required to use
   // window.domAutomationController.send to send a string value back to here.
   std::string ExecuteJavascriptAndReturnResult(
       const std::string& javascript);
 
-  // Waits for the page title to be set to |expected_title|.
-  void ExpectTitle(const std::string& expected_title) const;
+  // Waits for the javascript to return OK via the automation controller.
+  // If the javascript returns != OK or times out, we fail the test.
+  void ExecuteJavascriptAndWaitForOk(const std::string& javascript);
 
   // Generates javascript code for a getUserMedia call.
   std::string GenerateGetUserMediaCall(const char* function_name,
...