Commit cf04d969 authored by kjellander's avatar kjellander Committed by Commit bot

Add simulcast browser test.

Also cleanup some unneeded setting of the kUseGpuInTests
command line flag.

TESTED=Successful run using:
out/Release/browser_tests --gtest_filter=WebRtcSimulcastBrowserTest.TestVgaReturnsTwoSimulcastStreams

BUG=

Review URL: https://codereview.chromium.org/874993002

Cr-Commit-Position: refs/heads/master@{#313095}
parent 3e7f7d83
...@@ -55,10 +55,8 @@ class WebRtcApprtcBrowserTest : public WebRtcTestBase { ...@@ -55,10 +55,8 @@ class WebRtcApprtcBrowserTest : public WebRtcTestBase {
void SetUpCommandLine(base::CommandLine* command_line) override { void SetUpCommandLine(base::CommandLine* command_line) override {
EXPECT_FALSE(command_line->HasSwitch(switches::kUseFakeUIForMediaStream)); EXPECT_FALSE(command_line->HasSwitch(switches::kUseFakeUIForMediaStream));
// The video playback will not work without a GPU, so force its use here. // Use fake devices in order to run on VMs.
command_line->AppendSwitch(switches::kUseGpuInTests); command_line->AppendSwitch(switches::kUseFakeDeviceForMediaStream);
base::CommandLine::ForCurrentProcess()->AppendSwitch(
switches::kUseFakeDeviceForMediaStream);
} }
void TearDown() override { void TearDown() override {
......
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/path_service.h"
#include "chrome/browser/media/webrtc_browsertest_base.h"
#include "chrome/browser/media/webrtc_browsertest_common.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_tabstrip.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/ui_test_utils.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/test/browser_test_utils.h"
#include "media/base/media_switches.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "testing/perf/perf_test.h"
#include "ui/gl/gl_switches.h"
static const char kSimulcastTestPage[] = "/webrtc/webrtc-simulcast.html";
// Simulcast integration test. This test ensures 'a=x-google-flag:conference'
// is working and that Chrome is capable of sending simulcast streams. To run
// this test, Chrome must be hideout-enabled.
// Fixture for the simulcast browser test. It configures the browser's
// command line so the test can run unattended: media-capture permission
// prompts are auto-accepted and a fake capture device is used, so no real
// camera (or user) is needed.
class WebRtcSimulcastBrowserTest : public WebRtcTestBase {
 public:
  // TODO(phoglund): Make it possible to enable DetectErrorsInJavaScript() here.
  void SetUpCommandLine(base::CommandLine* command_line) override {
    // Just answer 'allow' to GetUserMedia invocations.
    command_line->AppendSwitch(switches::kUseFakeUIForMediaStream);
    // Use fake devices in order to run on VMs.
    command_line->AppendSwitch(switches::kUseFakeDeviceForMediaStream);
  }
};
// Loads the simulcast test page and delegates the actual verification to
// the JavaScript testVgaReturnsTwoSimulcastStreams() function, which is
// expected to report "OK" through the DOM automation controller on success.
IN_PROC_BROWSER_TEST_F(WebRtcSimulcastBrowserTest,
                       TestVgaReturnsTwoSimulcastStreams) {
  ASSERT_TRUE(embedded_test_server()->InitializeAndWaitUntilReady());

  ui_test_utils::NavigateToURL(
      browser(), embedded_test_server()->GetURL(kSimulcastTestPage));

  content::WebContents* tab_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  ASSERT_EQ("OK", ExecuteJavascript("testVgaReturnsTwoSimulcastStreams()",
                                    tab_contents));
}
...@@ -116,9 +116,6 @@ class WebRtcVideoQualityBrowserTest : public WebRtcTestBase, ...@@ -116,9 +116,6 @@ class WebRtcVideoQualityBrowserTest : public WebRtcTestBase,
command_line->AppendSwitchPath(switches::kUseFileForFakeVideoCapture, command_line->AppendSwitchPath(switches::kUseFileForFakeVideoCapture,
webrtc_reference_video_y4m_); webrtc_reference_video_y4m_);
command_line->AppendSwitch(switches::kUseFakeDeviceForMediaStream); command_line->AppendSwitch(switches::kUseFakeDeviceForMediaStream);
// The video playback will not work without a GPU, so force its use here.
command_line->AppendSwitch(switches::kUseGpuInTests);
} }
// Writes all frames we've captured so far by grabbing them from the // Writes all frames we've captured so far by grabbing them from the
......
...@@ -310,6 +310,7 @@ ...@@ -310,6 +310,7 @@
'browser/media/chrome_webrtc_disable_encryption_flag_browsertest.cc', 'browser/media/chrome_webrtc_disable_encryption_flag_browsertest.cc',
'browser/media/chrome_webrtc_getmediadevices_browsertest.cc', 'browser/media/chrome_webrtc_getmediadevices_browsertest.cc',
'browser/media/chrome_webrtc_perf_browsertest.cc', 'browser/media/chrome_webrtc_perf_browsertest.cc',
'browser/media/chrome_webrtc_simulcast_browsertest.cc',
'browser/media/chrome_webrtc_video_quality_browsertest.cc', 'browser/media/chrome_webrtc_video_quality_browsertest.cc',
'browser/media/chrome_webrtc_webcam_browsertest.cc', 'browser/media/chrome_webrtc_webcam_browsertest.cc',
'browser/media/encrypted_media_istypesupported_browsertest.cc', 'browser/media/encrypted_media_istypesupported_browsertest.cc',
......
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>WebRTC Simulcast Test</title>
<style>
video {
border:5px solid black;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="localVideo" autoplay></video>
<!-- These video tags and canvases should match the stream labels returned by
     PC_SERVER_REMOTE_OFFER. The canvases are there for video detection. -->
<video id="remoteVideo1" autoplay></video>
<video id="remoteVideo2" autoplay></video>
<video id="remoteVideo3" autoplay></video>
<canvas id="remoteVideo1-canvas" style="display:none"></canvas>
<canvas id="remoteVideo2-canvas" style="display:none"></canvas>
<canvas id="remoteVideo3-canvas" style="display:none"></canvas>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript">
// |pcClient| sends the simulcast streams; |pcServer| receives them.
var pcClient, pcServer;
// Local camera stream, set once getUserMedia succeeds in gotStream().
var localStream;
var remoteVideoUrls = [];
// Answer produced by pcServer; re-applied when the client renegotiates.
var serverAnswer;
// When all required video tags are playing video, succeed the test.
setAllEventsOccuredHandler(function() {
  returnToTest('OK');
});
// Checks that we can get a simulcast stream call up on VGA. Entry point
// invoked by the C++ browser test; success is reported asynchronously via
// the all-events-occurred handler once both remote video tags show video.
function testVgaReturnsTwoSimulcastStreams() {
  initialize();
  openCamera(640, 480);

  // For VGA we should get a QVGA and VGA stream. The video tags are named after
  // the stream IDs which are defined in PC_SERVER_REMOTE_OFFER.
  waitForVideo('remoteVideo1');
  waitForVideo('remoteVideo2');
}
// Template for the local offer, representing a single input stream backing
// 3 different SSRCs, all in one SIM ssrc-group (which is what requests
// simulcast sending).
//
// Always returns an RTCSessionDescription of type 'offer'. Previously the
// "no video track" branch returned the raw SDP string instead, which is the
// wrong type for the setLocalDescription() callers of this function; that
// branch now falls through to the common return.
function makeClientOffer() {
  var lines = [
    'v=0',
    'o=- 0 3 IN IP4 127.0.0.1',
    's=-',
    't=0 0',
    'm=video 1 RTP/SAVPF 100',
    'a=rtcp:1 IN IP4 0.0.0.0',
    'a=ice-ufrag:6HHHdzzeIhkE0CKj',
    'a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew',
    'a=sendonly',
    'a=mid:video',
    'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
        'inline:Rlz8z1nMtwq9VF7j06kTc7uyio1iYuEdeZ7z1P9E',
    'a=rtpmap:100 VP8/30',
    'a=fmtp:100 x-google-start-bitrate=100000',
    'a=fmtp:100 x-google-min-bitrate=80000',
    'a=x-google-flag:conference'
  ];
  if (localStream) {
    var videoTracks = localStream.getVideoTracks();
    if (videoTracks.length > 0) {
      trace('Using Video device: ' + videoTracks[0].id);
      // Tie SSRCs 1-3 together in a SIM group, all backed by the one local
      // video track.
      lines = lines.concat([
        'a=ssrc-group:SIM 1 2 3',
        'a=ssrc:1 cname:localVideo',
        'a=ssrc:1 msid:' + localStream.id + ' ' + videoTracks[0].id,
        'a=ssrc:2 cname:localVideo',
        'a=ssrc:2 msid:' + localStream.id + ' ' + videoTracks[0].id,
        'a=ssrc:3 cname:localVideo',
        'a=ssrc:3 msid:' + localStream.id + ' ' + videoTracks[0].id
      ]);
    } else {
      trace('WARNING: No video device!');
    }
  }
  lines.push('');
  return new RTCSessionDescription({
    'type': 'offer',
    'sdp': lines.join('\n')
  });
}
// Remote perspective on that offer, representing each SSRC as a distinct
// (non-synchronized) output video stream.
var PC_SERVER_REMOTE_OFFER = [
'v=0',
'o=- 0 3 IN IP4 127.0.0.1',
's=-',
't=0 0',
'm=video 1 RTP/SAVPF 100',
'a=sendonly',
'a=mid:video',
'a=rtcp:1 IN IP4 0.0.0.0',
'a=ice-ufrag:6HHHdzzeIhkE0CKj',
'a=ice-pwd:XYDGVpfvklQIEnZ6YnyLsAew',
'a=crypto:1 AES_CM_128_HMAC_SHA1_80 ' +
'inline:Rlz8z1nMtwq9VF7j06kTc7uyio1iYuEdeZ7z1P9E',
'a=rtpmap:100 VP8/30',
'a=x-google-flag:conference',
'a=fmtp:100 x-google-start-bitrate=100000',
'a=fmtp:100 x-google-min-bitrate=80000',
'a=ssrc:1 cname:remoteVideo1',
'a=ssrc:1 msid:remoteVideo1 remoteVideo1v0',
'a=ssrc:2 cname:remoteVideo2',
'a=ssrc:2 msid:remoteVideo2 remoteVideo2v0',
'a=ssrc:3 cname:remoteVideo3',
'a=ssrc:3 msid:remoteVideo3 remoteVideo3v0',
''
].join('\n');
// Logs |text| to the console prefixed with a seconds-precision timestamp,
// stripping at most one trailing newline so entries stay on one line.
function trace(text) {
  if (text.slice(-1) == '\n') {
    text = text.slice(0, -1);
  }
  var stamp = (performance.now() / 1000).toFixed(3);
  console.log(stamp + ': ' + text);
}
// Creates the client and server peer connections, wires up their ICE and
// stream callbacks, and starts negotiation by applying the client's
// simulcast offer as its local description.
function initialize() {
  trace('Setting up for a new call.');
  var servers = null;
  // DTLS is disabled; the SDP templates in this file use SDES (a=crypto)
  // keying instead.
  var constraints = {'mandatory': {'DtlsSrtpKeyAgreement': false}};
  pcClient = new webkitRTCPeerConnection(servers, constraints);
  trace('Created local peer connection object pcClient');
  pcClient.onicecandidate = onClientIceCandidate;
  pcServer = new webkitRTCPeerConnection(servers, constraints);
  trace('Created remote peer connection object pcServer');
  pcServer.onicecandidate = onServerIceCandidate;
  pcServer.onaddstream = onServerGotStream;
  var pcClientInitialOffer = makeClientOffer();
  trace('Setting initial local Offer to:\n' + pcClientInitialOffer);
  // Once the local description is set, continue by feeding pcServer its
  // remote offer (success callback).
  pcClient.setLocalDescription(pcClientInitialOffer,
                               setServerRemoteDescription);
}
// getUserMedia success callback: displays the stream in the local video
// tag, attaches it to the sending connection and renegotiates so the
// stream's ssrc lines make it into the client offer.
function gotStream(stream) {
  trace('Received local stream');
  localVideo.src = webkitURL.createObjectURL(stream);
  localStream = stream;
  pcClient.addStream(localStream);
  renegotiateClient();
}
// getUserMedia failure callback: fails the test immediately, since the
// fake device flags mean capture is expected to always succeed here.
function didntGetStream(error) {
  var message = 'Unexpectedly failed to acquire user media: ' + error;
  returnToTest(message);
}
// (Re-)acquires the local camera at exactly |width| x |height|, first
// releasing any previously opened stream. On success gotStream() runs,
// on failure didntGetStream() fails the test.
function openCamera(width, height) {
  if (localStream) {
    pcClient.removeStream(localStream);
    localStream.stop();
    localStream = null;
  }
  // Pin both min and max so we get precisely the requested resolution.
  var constraints = {
    audio: false,
    video: {'mandatory': {'minWidth': width, 'maxWidth': width,
                          'minHeight': height, 'maxHeight': height}}
  };
  navigator.webkitGetUserMedia(constraints, gotStream, didntGetStream);
}
// Re-applies a freshly built offer (now including the local stream's ssrc
// lines) and then restores the server's answer as the remote description.
// NOTE(review): assumes onServerAnswer has already populated |serverAnswer|
// by the time the local description callback fires — verify the ordering.
function renegotiateClient() {
  pcClient.setLocalDescription(makeClientOffer(), function() {
    pcClient.setRemoteDescription(serverAnswer);
  });
}
// Feeds pcServer the canned remote offer that presents the three SSRCs as
// three separate output streams, then continues to answer creation.
function setServerRemoteDescription() {
  trace('Setting remote Offer to:\n' + PC_SERVER_REMOTE_OFFER);
  pcServer.setRemoteDescription(new RTCSessionDescription({
    'type': 'offer',
    'sdp': PC_SERVER_REMOTE_OFFER
  }), afterSetServerRemoteDescription);
}
// Continues negotiation once pcServer's remote description is in place:
// asks pcServer to produce its answer.
function afterSetServerRemoteDescription() {
  pcServer.createAnswer(onServerAnswer);
}
// Receives pcServer's answer, appends the x-google conference flag to its
// SDP, caches it for renegotiation, and applies it on both connections.
function onServerAnswer(desc) {
  desc.sdp += 'a=x-google-flag:conference\n';
  serverAnswer = desc;
  trace('Setting both Answers to:\n' + desc.sdp);
  pcServer.setLocalDescription(desc);
  pcClient.setRemoteDescription(desc);
}
// pcServer's onaddstream handler: routes each incoming stream into the
// video tag whose id matches the stream label (as set up by the msid lines
// in PC_SERVER_REMOTE_OFFER).
function onServerGotStream(e) {
  trace('Received remote stream: ' + e.stream.label +
        '; looking up corresponding video tag.');
  var remoteVideo = $(e.stream.label);
  if (!remoteVideo) {
    // All streams we receive must have a corresponding video tag defined in the
    // html, otherwise we can't detect video in it.
    throw 'Received video with unexpected id ' + e.stream.label;
  }
  remoteVideo.src = webkitURL.createObjectURL(e.stream);
}
// Trickles client-side ICE candidates straight to the server connection
// (both ends live in this page, so no signaling channel is needed).
function onClientIceCandidate(event) {
  if (!event.candidate) {
    return;  // End-of-candidates marker; nothing to forward.
  }
  pcServer.addIceCandidate(event.candidate);
  trace('Local ICE candidate:\n' + event.candidate.candidate);
}
// Trickles server-side ICE candidates straight to the client connection.
function onServerIceCandidate(event) {
  if (!event.candidate) {
    return;  // End-of-candidates marker; nothing to forward.
  }
  pcClient.addIceCandidate(event.candidate);
  trace('Remote ICE candidate:\n' + event.candidate.candidate);
}
// Reports |message| back to the C++ browser test through the DOM
// automation controller ('OK' signals success).
function returnToTest(message) {
  if (!window.domAutomationController)
    throw 'Expected to run in an automated context.';
  window.domAutomationController.send(message);
}
// Shorthand for document.getElementById.
$ = function(id) {
  return document.getElementById(id);
};
</script>
</body>
// Copyright (c) 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file contains functions duplicated from
// content/test/data/media/webrtc_test_utilities.js
// TODO(phoglund): Eliminate this copy and rewrite the
// WebRtcBrowserTest.TestVgaReturnsTwoSimulcastStreams test to use the browser
// tests style instead.
// These must match with how the video and canvas tags are declared in html.
const VIDEO_TAG_WIDTH = 320;
const VIDEO_TAG_HEIGHT = 240;
// Number of test events that must occur before the test passes. When the
// test passes, the function gAllEventsOccured is called.
var gNumberOfExpectedEvents = 0;
// Number of events that currently have occurred.
var gNumberOfEvents = 0;
var gAllEventsOccured = function () {};
// Use this function to set a function that will be called once all expected
// events has occurred.
function setAllEventsOccuredHandler(handler) {
  gAllEventsOccured = handler;
}
// See comments on waitForVideo. Polls every 100 ms: draws the named video
// element onto its companion canvas, samples the pixels, and invokes
// |callback| (once) as soon as the sample indicates playing video.
function detectVideoIn(videoElementName, callback) {
  var width = VIDEO_TAG_WIDTH;
  var height = VIDEO_TAG_HEIGHT;
  var videoElement = $(videoElementName);
  // Requires a canvas with id |videoElementName|-canvas in the DOM.
  var canvas = $(videoElementName + '-canvas');
  var waitVideo = setInterval(function() {
    var context = canvas.getContext('2d');
    context.drawImage(videoElement, 0, 0, width, height);
    var pixels = context.getImageData(0, 0, width, height).data;
    if (isVideoPlaying(pixels, width, height)) {
      clearInterval(waitVideo);
      callback();
    }
  }, 100);
}
/**
 * Blocks test success until the provided video element has playing video.
 *
 * @param videoElement The id of the video element. There must also be a
 *     canvas somewhere in the DOM tree with the id |videoElement|-canvas.
 */
function waitForVideo(videoElement) {
  document.title = 'Waiting for video...';
  addExpectedEvent();
  detectVideoIn(videoElement, function () { eventOccured(); });
}
/**
 * Blocks test success until the provided peerconnection reports the signaling
 * state 'stable'. Polls every 100 ms and counts as one expected event.
 *
 * @param peerConnection The peer connection to look at.
 */
function waitForConnectionToStabilize(peerConnection) {
  addExpectedEvent();
  var waitForStabilization = setInterval(function() {
    if (peerConnection.signalingState == 'stable') {
      clearInterval(waitForStabilization);
      eventOccured();
    }
  }, 100);
}
/**
 * Adds an expectation for an event to occur at some later point. You may call
 * this several times per test, which will each add an expected event. Once all
 * events have occurred, we'll call the "all events occurred" handler which will
 * generally succeed the test or move the test to the next phase.
 */
function addExpectedEvent() {
  ++gNumberOfExpectedEvents;
}
// See comment on addExpectedEvent. Records one completed event and fires
// the all-events handler exactly when the count reaches the expectation.
function eventOccured() {
  gNumberOfEvents = gNumberOfEvents + 1;
  var allDone = (gNumberOfEvents == gNumberOfExpectedEvents);
  if (allDone) {
    gAllEventsOccured();
  }
}
// This very basic video verification algorithm reports playing video when
// any of a small run of bytes sampled at the middle of the pixel buffer is
// nonzero. It relies on the assumption that a video element with a null
// source just presents zeroes.
function isVideoPlaying(pixels, width, height) {
  var start = width * height / 2;  // Sample near the middle of the image.
  for (var offset = 0; offset < 20; ++offset) {
    if (pixels[start + offset] > 0) {
      return true;
    }
  }
  return false;
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment