Commit bcdf9a3b authored by jansson's avatar jansson Committed by Commit bot

Replace peerconnection.html with a redirect to github.

Removed all other manual test files in the ../webrtc/manual folder. They
have also moved to GitHub, but since they are not used that often, no
redirect pages are necessary.

The ../webrtc/manual folder now resides here:
https://github.com/GoogleChrome/webrtc/tree/master/samples/web/content/manual-test

BUG=NONE
NOTRY=TRUE

Review URL: https://codereview.chromium.org/609733002

Cr-Commit-Position: refs/heads/master@{#297156}
parent 52c839c2
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<html>
<head>
<title>WebRTC GetUserMedia Constraints Manual Test</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script type="text/javascript" src="../adapter.js"></script>
<script type="text/javascript" src="constraints.js"></script>
<link rel="StyleSheet" type="text/css" href="stylesheet.css">
<meta charset="utf-8">
</head>
<body>
<div id="wrapper">
<div id="left">
<p>This page can create GetUserMedia <a href="http://goo.gl/qZQjV">
MediaStreamConstraints</a> that can be used on the<br/>
<a href="peerconnection.html">PeerConnection Manual Test page.</a></p>
MediaStreamConstraints:<br/>
<textarea id="getusermedia-constraints" rows="10" cols="60"
readonly="readonly"></textarea>
<br/>
Audio<input type="checkbox" id="audio" checked />
Video<input type="checkbox" id="video" checked />
<h2>Video constraints</h2>
Only applicable if the video checkbox is checked above.
<h3>Mandatory</h3>
Min
<input type="text" id="mandatory-min-width" size="4"/>x
<input type="text" id="mandatory-min-height" size="4"/>
FPS: <input type="text" id="mandatory-min-fps" size="2"/>
Aspect ratio: <input type="text" id="mandatory-min-ar" size="2"/><br/>
Max
<input type="text" id="mandatory-max-width" size="4"/>x
<input type="text" id="mandatory-max-height" size="4"/>
FPS: <input type="text" id="mandatory-max-fps" size="2"/>
Aspect ratio: <input type="text" id="mandatory-max-ar" size="2"/>
<h3>Optional</h3>
Min
<input type="text" id="optional-min-width" size="4"/>x
<input type="text" id="optional-min-height" size="4"/>
FPS: <input type="text" id="optional-min-fps" size="2"/>
Aspect ratio: <input type="text" id="optional-min-ar" size="2"/><br/>
Max
<input type="text" id="optional-max-width" size="4"/>x
<input type="text" id="optional-max-height" size="4"/>
FPS: <input type="text" id="optional-max-fps" size="2"/>
Aspect ratio: <input type="text" id="optional-max-ar" size="2"/><br/>
<button id="re-request" onclick="doGetUserMedia();">
Request GetUserMedia
</button>
<h2>Messages</h2>
<pre id="messages"></pre>
</div>
<div>
<h2>Local Preview</h2>
<video width="320" height="240" id="local-view" autoplay="autoplay">
</video>
</div>
</div>
</body>
</html>
/**
* Copyright (c) 2012 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
/**
* See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more
* information on getUserMedia.
*/
/**
 * Asks permission to use the webcam and mic from the browser, using the
 * MediaStreamConstraints built from the HTML controls on the page.
 */
function doGetUserMedia() {
  // Call into getUserMedia via the polyfill (adapter.js).
  var constraints = getConstraints_();
  var constraintsString = JSON.stringify(constraints, null, ' ');
  // Use .value rather than .innerHTML: .value keeps updating what the
  // textarea displays even after the user has interacted with it, and is
  // consistent with how peerconnection_manual.js populates this field.
  $('getusermedia-constraints').value = constraintsString;
  if (!getUserMedia) {
    log_('Browser does not support WebRTC.');
    return;
  }
  log_('Requesting getUserMedia with constraints: ' + constraintsString);
  getUserMedia(constraints, getUserMediaOkCallback_,
               getUserMediaFailedCallback_);
}
// Internals
/**
 * Builds a Javascript constraints dictionary out of the selected options in the
 * HTML controls on the page. Field values are kept as strings, exactly as
 * typed (the original behavior).
 * @private
 * @return {Object} A dictionary of constraints.
 */
function getConstraints_() {
  var constraints = {};
  constraints.audio = $('audio').checked;
  if (!$('video').checked) {
    constraints.video = false;
    return constraints;
  }
  constraints.video = { mandatory: {}, optional: [] };
  // Mandatory constraints are written straight onto video.mandatory.
  addMandatoryConstraint_(constraints, 'mandatory-min-width', 'minWidth');
  addMandatoryConstraint_(constraints, 'mandatory-min-height', 'minHeight');
  addMandatoryConstraint_(constraints, 'mandatory-min-fps', 'minFrameRate');
  addMandatoryConstraint_(constraints, 'mandatory-min-ar', 'minAspectRatio');
  addMandatoryConstraint_(constraints, 'mandatory-max-width', 'maxWidth');
  addMandatoryConstraint_(constraints, 'mandatory-max-height', 'maxHeight');
  addMandatoryConstraint_(constraints, 'mandatory-max-fps', 'maxFrameRate');
  addMandatoryConstraint_(constraints, 'mandatory-max-ar', 'maxAspectRatio');
  // Optional constraints are appended as single-key objects, in the same
  // order as the fields appear on the page.
  addOptionalConstraint_(constraints, 'optional-min-width', 'minWidth');
  addOptionalConstraint_(constraints, 'optional-min-height', 'minHeight');
  addOptionalConstraint_(constraints, 'optional-min-fps', 'minFrameRate');
  addOptionalConstraint_(constraints, 'optional-min-ar', 'minAspectRatio');
  addOptionalConstraint_(constraints, 'optional-max-width', 'maxWidth');
  addOptionalConstraint_(constraints, 'optional-max-height', 'maxHeight');
  addOptionalConstraint_(constraints, 'optional-max-fps', 'maxFrameRate');
  addOptionalConstraint_(constraints, 'optional-max-ar', 'maxAspectRatio');
  return constraints;
}

/**
 * Copies the value of the form field |elementId| into
 * constraints.video.mandatory[key] unless the field is empty.
 * @private
 */
function addMandatoryConstraint_(constraints, elementId, key) {
  var value = $(elementId).value;
  if (value != '') {
    constraints.video.mandatory[key] = value;
  }
}

/**
 * Appends {key: <field value>} to constraints.video.optional unless the form
 * field |elementId| is empty.
 * @private
 */
function addOptionalConstraint_(constraints, elementId, key) {
  var value = $(elementId).value;
  if (value != '') {
    var entry = {};
    entry[key] = value;
    constraints.video.optional.push(entry);
  }
}
/**
 * Success callback for getUserMedia. Stores the stream in the global
 * gLocalStream, attaches it to the local preview video tag (via adapter.js's
 * attachMediaStream) and schedules a size update of the tag.
 * @private
 * @param {MediaStream} stream Media stream.
 */
function getUserMediaOkCallback_(stream) {
gLocalStream = stream;
var videoTag = $('local-view');
attachMediaStream(videoTag, stream);
// Due to crbug.com/110938 the size is 0 when onloadedmetadata fires.
// videoTag.onloadedmetadata = updateVideoTagSize_(videoTag);
// Use setTimeout as a workaround for now.
setTimeout(function() {updateVideoTagSize_(videoTag)}, 500);
// NOTE(review): gRequestWebcamAndMicrophoneResult looks like a status flag
// polled by an automated test harness -- confirm before renaming or removing.
gRequestWebcamAndMicrophoneResult = 'ok-got-stream';
}
/**
 * Resizes the given video tag to match the dimensions of the stream it is
 * currently playing, and logs the new size.
 * @private
 * @param {Object} videoTag The video tag to update.
 */
function updateVideoTagSize_(videoTag) {
  var width = videoTag.videoWidth;
  var height = videoTag.videoHeight;
  // Don't update if sizes are 0 (happens for Chrome M23).
  if (!(width > 0 && height > 0)) {
    return;
  }
  log_('Set video tag width and height: ' + width + 'x' + height);
  videoTag.width = width;
  videoTag.height = height;
}
/**
 * Failure callback for getUserMedia; logs the error to the on-page messages
 * area and the console.
 * @private
 * @param {NavigatorUserMediaError} error Error containing details.
 */
function getUserMediaFailedCallback_(error) {
log_('Failed with error: ' + error);
}
/**
 * Shortcut for document.getElementById. Declared with var: the previous
 * bare assignment created an implicit global, which throws in strict mode.
 * @param {string} id Case-sensitive element id.
 * @return {?Element} The matching element, or null if not found.
 */
var $ = function(id) {
  return document.getElementById(id);
};
/**
 * Simple logging function: writes to the console and appends to the on-page
 * messages area.
 * NOTE(review): the message is appended via innerHTML, so any markup in
 * |message| will be interpreted as HTML.
 * @private
 * @param {string} message Message to print.
 */
function log_(message) {
console.log(message);
$('messages').innerHTML += message + '<br>';
}
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<html>
<head>
<title>WebRTC PeerConnection Manual Test Help Page</title>
<link rel="StyleSheet" type="text/css" href="stylesheet.css">
<meta charset="utf-8">
</head>
<body>
<h1>WebRTC PeerConnection Manual Test Help Page</h1>
<p>
The test page is intended for testing WebRTC calls.
This is how you set up a normal call:
</p>
<ol>
<li>Open this page in two tabs.</li>
<li>Start the peerconnection server. Click on the question mark next
to the 'server' field for instruction on how to do that. The easiest
thing is to start it on localhost, but you can start it on any
machine you like and connect to hostname:8888.</li>
<li>Click the Connect button in both tabs.</li>
<li>Click the Call:Negotiate button in one of the tabs. You should see a bunch
of printouts when this happens. Note that no streams are sent to
begin with (although you could run steps 5-6 before this step to get streams
even in the initial call).</li>
<li>Grant media access using the checkboxes and Request button.</li>
<li>Add the local stream by clicking the "Add" button, in both tabs.</li>
<li>Now you must re-negotiate the call by clicking on Negotiate again.</li>
<li>You should now have a call up and both sides should be receiving
media data (depending on what access you granted on the respective
pages).</li>
<li>You can now choose to stop, re-request, re-send or disable streams
in any way you like, or hang up and re-start the call. You don't
need to disconnect: that's done automatically when you close the
page. Hanging up is NOT done automatically though.</li>
</ol>
<p>
To create a data channel:
</p>
<ol>
<li>Make sure Chrome is started with the --enable-data-channels flag.</li>
<li>Follow the instructions above to connect two tabs to a
peerconnection_server.</li>
<li>Click the Data channel: Create button in one tab. Notice the status
changes to "connecting".</li>
<li>Click the Call:Negotiate button. You should see the status change to
"open" in both tabs. </li>
<li>Enter text in the textbox next to the Send data button and then click Send
data. Notice the text is received in the remote tab in the Received on data
channel text box. Data can be sent in both directions.</li>
<li>To close the channel press the Close button followed by Negotiate. Notice
the status changes to "closed".</li>
</ol>
<p>Detailed descriptions:</p>
<ul>
<li>Connect - once a connection is established, you generally won't
need to click this button again. Connecting really isn't something
related to WebRTC as such, it's just the signalling solution.</li>
<li>Note that if more than two users/machines have established a
connection to the same PC server, you will get an error when
pressing this button. The test is hard-coded to only allow 2 peers
on the server at the same time.</li>
<li>Pressing the Add button for local streams will in effect add
the current local stream, such as it is, to the current
peerconnection.</li>
<li>If you request user media again, it will overwrite the current
local stream with the new one. This means that pressing Add will
add the stream you just got from the request. The code will not
attempt to stop or remove the previous stream from the
peerconnection, so depending on peerconnection's semantics the old
stream will remain with the peerconnection (perhaps the streams will
be sent simultaneously?)</li>
<li>Hang Up will clear away peer connections on both sides, and a new
call can be started if desired. The peers remain connected to the
peerconnection server.</li>
<li>The Toggle buttons will set the .enabled properties on the first
video and audio track for the local or remote stream, respectively.
This is effectively a temporary "mute" for the streams.</li>
<li>Stop terminates a stream, which means it will no longer send any
more data.</li>
<li>Remove will remove the current local stream from the current
peerconnection. For instance, you should be able to send a stream,
remove it, re-request a new stream and send that within the same
call. Note that re-requesting user media overwrites the current
media stream, so the reverse is not possible.</li>
<li>The PeerConnection constraints field can pass in constraints for the
peerconnection to be established. The code will attempt to eval the code
you write in and pass it whenever the code asks for constraints.
[experimental]</li>
<li>The Force Opus checkbox will remove all codecs except OPUS for all
outgoing messages sent by this page. Note that this ONLY means that
we are guaranteed to send Opus to the other side; it does NOT mean
that the other side will necessarily send Opus to us. To do that,
you need to check the box on the other side too. You can either
check the box before the call, or check the box and then re-send the
local stream.</li>
</ul>
</body>
</html>
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<html>
<head>
<title>WebRTC IFRAME PeerConnection Manual Test</title>
<meta charset="utf-8">
</head>
<body>
<iframe width="100%" height="100%" src="peerconnection.html"></iframe>
</body>
</html>
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<!--
Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
Use of this source code is governed by a BSD-style license
that can be found in the LICENSE file in the root of the source
tree. An additional intellectual property rights grant can be found
in the file PATENTS. All contributing project authors may
be found in the AUTHORS file in the root of the source tree.
-->
<html>
<head>
<title>WebRTC Multi-PeerConnection Test</title>
<script type="text/javascript">
// This file can create an arbitrary number of peer connection calls, each
// with an arbitrary number of auto-echoing data channels. It can run with
// two separate cameras.
// Our two local video / audio streams.
var gLocalStream1 = null;
var gLocalStream2 = null;
// The number of remote view windows (2x number of calls).
var gNumRemoteViews = 0;
// Maps connection id -> { connection1, connection2 }.
var gAllConnections = [];
// Monotonically increasing id used to key new entries in gAllConnections.
var gNumConnections = 0;
// Maps data channel id -> sending channel.
// Note: there can be many data channels per connection id.
var gSendingDataChannels = [];
// Monotonically increasing id used to key new entries in gSendingDataChannels.
var gTotalNumSendChannels = 0;
/**
 * Requests two independent audio+video streams from the (Chrome-prefixed)
 * webkitGetUserMedia API and shows each in its own local preview element,
 * emulating two separate peers on one page.
 */
function startTest() {
navigator.webkitGetUserMedia(
{video: true, audio: true},
function(localStream) {
gLocalStream1 = localStream;
play(localStream, 'local-view-1');
},
getUserMediaFailedCallback);
navigator.webkitGetUserMedia(
{video: true, audio: true},
function(localStream) {
gLocalStream2 = localStream;
play(localStream, 'local-view-2');
},
getUserMediaFailedCallback);
}
/**
 * Creates a fresh remote <video> element in the given peer's column and
 * starts playing the stream in it.
 * @param {MediaStream} stream The newly received remote stream.
 * @param {number} peerNumber 1 or 2; selects which peer's column to use.
 */
function playStreamInNewRemoteView(stream, peerNumber) {
  console.log('Remote stream to connection ' + peerNumber +
              ': ' + stream.label);
  var viewId = 'remote-view-' + (++gNumRemoteViews);
  addRemoteView(viewId, peerNumber);
  play(stream, viewId);
}
/**
 * Appends a new 320x240 autoplaying <video> element with id |elementName| to
 * the given peer's remote-views table.
 * @param {string} elementName Id for the new video element.
 * @param {number} peerNumber 1 or 2.
 */
function addRemoteView(elementName, peerNumber) {
var remoteViews = $('remote-views-' + peerNumber);
remoteViews.innerHTML +=
'<tr><td><video width="320" height="240" id="' + elementName + '" ' +
'autoplay="autoplay"></video></td></tr>';
}
/**
 * Plays a media stream in the video element with the given id by pointing
 * the element's src at an object URL for the stream.
 * NOTE(review): URL.createObjectURL(stream) + video.src is the legacy way to
 * do this; modern code assigns video.srcObject = stream instead.
 * @param {MediaStream} stream The stream to play.
 * @param {string} videoElement Id of the target video element.
 */
function play(stream, videoElement) {
var streamUrl = URL.createObjectURL(stream);
$(videoElement).src = streamUrl;
}
/**
 * Error callback for webkitGetUserMedia; logs the numeric error code.
 * @param {NavigatorUserMediaError} error Error containing details.
 */
function getUserMediaFailedCallback(error) {
console.log('getUserMedia request failed with code ' + error.code);
}
/**
 * Sets up a new call: creates two local peer connections (one per on-page
 * "peer"), adds one local stream to each, wires up ICE candidate exchange
 * and remote-stream playback between them, and adds the data channel anchor
 * UI for the pair.
 */
function call() {
  // Declared with var: these were previously implicit globals, which throws
  // in strict mode and risked clobbering state across repeated calls.
  var connection1 = new webkitRTCPeerConnection(null,
      {optional:[{RtpDataChannels: true}]});
  connection1.addStream(gLocalStream1);
  var connection2 = new webkitRTCPeerConnection(
      null, {optional:[{RtpDataChannels: true}]});
  connection2.addStream(gLocalStream2);
  // Each side forwards its ICE candidates straight to the other side.
  connection2.onicecandidate = function(event) {
    if (event.candidate) {
      var candidate = new RTCIceCandidate(event.candidate);
      connection1.addIceCandidate(candidate);
    }
  };
  connection1.onicecandidate = function(event) {
    if (event.candidate) {
      console.log('Ice candidate: ' + event.candidate);
      var candidate = new RTCIceCandidate(event.candidate);
      connection2.addIceCandidate(candidate);
    }
  };
  connection1.onaddstream = function(event) {
    playStreamInNewRemoteView(event.stream, 1);
    //addDataChannelAnchor(connection1, connection2);
  };
  connection2.onaddstream = function(event) {
    playStreamInNewRemoteView(event.stream, 2);
  };
  // TODO(phoglund): hack to work around
  // https://code.google.com/p/webrtc/issues/detail?id=1203. When it is fixed,
  // uncomment the negotiate call, remove addDataChannel and uncomment in
  // connection1.onaddstream. Also remove the notice at the top of the HTML!
  // negotiate(connection1, connection2);
  addDataChannelAnchor(connection1, connection2);
}
/**
 * Runs offer/answer negotiation between the two local peer connections:
 * connection1 creates the offer, connection2 answers.
 * NOTE(review): no error callbacks are passed to createOffer/createAnswer and
 * the set{Local,Remote}Description results are unchecked, so failures here
 * are silent -- acceptable for a manual test page only.
 * @param {RTCPeerConnection} connection1 The offering side.
 * @param {RTCPeerConnection} connection2 The answering side.
 */
function negotiate(connection1, connection2) {
connection1.createOffer(function(offer) {
connection1.setLocalDescription(offer);
connection2.setRemoteDescription(offer);
connection2.createAnswer(function(answer) {
console.log('Created answer ' + answer);
connection2.setLocalDescription(answer);
connection1.setRemoteDescription(answer);
});
});
}
/**
 * Registers the connection pair under a fresh connection id and adds an
 * "anchor" table for each peer, into which data channel input fields are
 * later inserted.
 * @param {RTCPeerConnection} connection1 First peer's connection.
 * @param {RTCPeerConnection} connection2 Second peer's connection.
 */
function addDataChannelAnchor(connection1, connection2) {
var connectionId = gNumConnections++;
gAllConnections[connectionId] = { connection1: connection1,
connection2: connection2 };
addOneAnchor(1, connectionId);
addOneAnchor(2, connectionId);
}
/**
 * Computes the DOM id of the table that anchors the data channel widgets for
 * a given peer and connection.
 * @param {number} peerId 1 or 2.
 * @param {number} connectionId Id of the connection pair.
 * @return {string} An id on the form data-channels-peer<peer>-<connection>.
 */
function makeDataChannelAnchorName(peerId, connectionId) {
  var parts = ['data-channels-peer', peerId, '-', connectionId];
  return parts.join('');
}
// This adds a target table we'll add our input fields to later.
/**
 * Adds, under the given peer's remote-views column, a button for creating a
 * new echoing data channel plus an empty anchor table that will hold that
 * channel's input fields.
 * @param {number} peerId 1 or 2.
 * @param {number} connectionId Id of the connection pair.
 */
function addOneAnchor(peerId, connectionId) {
var newButtonId = 'add-data-channel-' + connectionId;
var remoteViewContainer = 'remote-views-' + peerId;
$(remoteViewContainer).innerHTML +=
'<tr><td><button id="' + newButtonId + '" ' +
'onclick="addDataChannel(' + connectionId + ')">' +
' Add Echoing Data Channel</button></td></tr>';
var anchorName = makeDataChannelAnchorName(peerId, connectionId);
$(remoteViewContainer).innerHTML +=
'<tr><td><table id="' + anchorName + '"></table></td></tr>';
}
// Called by clicking Add Echoing Data Channel.
/**
 * Creates one echoing data channel on the given connection pair: adds sink
 * fields for both peers, hooks the channels up, adds the source field the
 * user types into, and re-negotiates the call so the channel is established.
 * @param {number} connectionId Key into gAllConnections.
 */
function addDataChannel(connectionId) {
var dataChannelId = gTotalNumSendChannels++;
var peer1SinkId = addDataChannelSink(1, connectionId, dataChannelId);
var peer2SinkId = addDataChannelSink(2, connectionId, dataChannelId);
var connections = gAllConnections[connectionId];
configureChannels(connections.connection1, connections.connection2,
peer1SinkId, peer2SinkId, dataChannelId);
// Add the field the user types in, and a
// dummy field so everything lines up nicely.
addDataChannelSource(1, connectionId, dataChannelId);
addDisabledInputField(2, connectionId, '(the above is echoed)');
negotiate(connections.connection1, connections.connection2);
}
/**
 * Creates an unreliable data channel from connection1, labeled with the id of
 * the element that should display data on the *other* side, and hooks up the
 * message handlers on both ends.
 * @param {RTCPeerConnection} connection1 The sending side.
 * @param {RTCPeerConnection} connection2 The receiving side.
 * @param {string} targetFor1 Id of the sink field shown for peer 1.
 * @param {string} targetFor2 Id of the sink field shown for peer 2.
 * @param {number} dataChannelId Key into gSendingDataChannels.
 */
function configureChannels(connection1, connection2, targetFor1, targetFor2,
                           dataChannelId) {
  // Label the channel so we know where to send the data later in dispatch.
  // Declared with var: this was previously an implicit global.
  var sendChannel = connection1.createDataChannel(
      targetFor2, { reliable : false });
  sendChannel.onmessage = function(messageEvent) {
    $(targetFor1).value = messageEvent.data;
  };
  gSendingDataChannels[dataChannelId] = sendChannel;
  connection2.ondatachannel = function(event) {
    // The channel got created by a message from a sending channel: hook this
    // new receiver channel up to dispatch and then echo any messages.
    event.channel.onmessage = dispatchAndEchoDataMessage;
  };
}
/**
 * Adds a disabled input field that displays the data the given peer receives
 * on the given data channel.
 * @param {number} peerNumber 1 or 2.
 * @param {number} connectionId Id of the connection pair.
 * @param {number} dataChannelId Id of the data channel.
 * @return {string} The DOM id of the newly added sink field.
 */
function addDataChannelSink(peerNumber, connectionId, dataChannelId) {
var sinkId = 'data-sink-peer' + peerNumber + '-' + dataChannelId;
var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
anchor.innerHTML +=
'<tr><td><input type="text" id="' + sinkId + '" disabled/></td></tr>';
return sinkId;
}
/**
 * Adds the editable input field whose onchange handler sends the typed text
 * on the associated data channel (via userWroteSomethingIn).
 * @param {number} peerNumber 1 or 2.
 * @param {number} connectionId Id of the connection pair.
 * @param {number} dataChannelId Id of the data channel.
 */
function addDataChannelSource(peerNumber, connectionId, dataChannelId) {
var sourceId = 'data-source-peer' + peerNumber + '-' + dataChannelId;
var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
anchor.innerHTML +=
'<tr><td><input type="text" id="' + sourceId + '"' +
' onchange="userWroteSomethingIn(\'' + sourceId + '\', ' +
dataChannelId + ');"/></td></tr>';
}
/**
 * Called when the user edits a data channel source field; sends the field's
 * current contents on the associated sending channel.
 * @param {string} sourceId DOM id of the input field that changed.
 * @param {number} dataChannelId Key into gSendingDataChannels.
 */
function userWroteSomethingIn(sourceId, dataChannelId) {
  gSendingDataChannels[dataChannelId].send($(sourceId).value);
}
/**
 * Adds a disabled input field prefilled with |text| (used as a visual filler
 * so the source/sink fields line up across the two peer columns).
 * @param {number} peerNumber 1 or 2.
 * @param {number} connectionId Id of the connection pair.
 * @param {string} text Static text to show in the field.
 */
function addDisabledInputField(peerNumber, connectionId, text) {
var anchor = $(makeDataChannelAnchorName(peerNumber, connectionId));
anchor.innerHTML +=
'<tr><td><input type="text" value="' + text + '" disabled/></td></tr>';
}
/**
 * Receiver-side onmessage handler: writes the received data into the input
 * element named by the channel's label, then echoes the message back.
 * @param {MessageEvent} messageEvent Event carrying the channel data.
 */
function dispatchAndEchoDataMessage(messageEvent) {
  // Since we labeled the channel with its target element's id when it was
  // created, the label tells us which input field should show the data.
  var channel = messageEvent.currentTarget;
  $(channel.label).value = messageEvent.data;
  channel.send('echo: ' + messageEvent.data);
}
// Kick off the test as soon as the page has loaded.
window.onload = function() {
  startTest();
};
// Shortcut for document.getElementById. Declared with var: the previous bare
// assignment created an implicit global, which throws in strict mode.
var $ = function(id) {
  return document.getElementById(id);
};
</script>
</head>
<body>
<table border="0">
<tr>
<td colspan="2">
Notes:
<ul>
<li>Due to https://code.google.com/p/webrtc/issues/detail?id=1203,
you must create a data channel to actually get a call negotiated. Add
one call at a time and click "add echoing data channel" for each and
you'll be fine.</li>
<li>For unknown reasons, adding a new data channel will clear the
input field contents for all other channels on the same call. This is
not the data channel's fault though.</li>
</ul>
</td>
</tr>
<tr>
<td>Local Preview for Peer 1</td>
<td>Local Preview for Peer 2</td>
</tr>
<tr>
<td><video width="320" height="240" id="local-view-1"
autoplay="autoplay"></video></td>
<td><video width="320" height="240" id="local-view-2"
autoplay="autoplay"></video></td>
</tr>
<tr>
<td><button id="add-call" onclick="call();">Add Call</button></td>
</tr>
<tr>
<td>
<table id="remote-views-1">
<tr>
<td>Remote (Incoming to Peer 1)</td>
</tr>
</table>
</td>
<td>
<table id="remote-views-2">
<tr>
<td>Remote (Incoming to Peer 2)</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>
\ No newline at end of file
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"> <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
<html> <html>
<head> <!--
Copyright 2014 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<head>
<title>WebRTC PeerConnection Manual Test</title> <title>WebRTC PeerConnection Manual Test</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox --> <META http-equiv="refresh" content="5;URL=http://goo.gl/Pu2xKX">
<script src="../adapter.js"></script> </head>
<script src="peerconnection_manual.js"></script> <body>
<link rel="StyleSheet" href="stylesheet.css"> <center>
<meta charset="utf-8"> <p>The contents you are looking for have moved to GitHub. You will be
</head> redirected to the new location automatically in 5 seconds.
<body> </p>
<p>WebRTC demo and sample pages now live on
<div id="wrapper"> <a href="http://googlechrome.github.io/webrtc/">GitHub.</a>
<div id="header"> This is a temporary redirect.
GetUserMedia <a href="http://goo.gl/V7cZg">MediaStreamConstraints</a>: </p>
<input type="text" id="getusermedia-constraints" wrap="soft"> </center>
Audio<input type="checkbox" id="audio" checked </body>
onclick="updateGetUserMediaConstraints();"/>
Video<input type="checkbox" id="video" checked
onclick="updateGetUserMediaConstraints();"/>
Screen capture<input type="checkbox" id="screencapture"
onclick="updateGetUserMediaConstraints();"/>
<button class="button-green" id="re-request"
onclick="getUserMediaFromHere();">Request GetUserMedia</button><br/>
Audio source <select class="drop-down" id="audiosrc"
onchange="updateGetUserMediaConstraints();"></select>
Video source <select class="drop-down" id="videosrc"
onchange="updateGetUserMediaConstraints();"></select>
Optional min size <input type="text" id="video-width" value="1280"
size="5px" onblur="updateGetUserMediaConstraints();">
x <input type="text" id="video-height" value="720" size="5px"
onblur="updateGetUserMediaConstraints();">
<button id="get-devices" onclick="getDevices();">
Get devices</button>
Onload<input type="checkbox" id="get-devices-onload">
You can also use <a href="constraints.html">constraints.html</a>&nbsp;&nbsp;
<a href="peerconnection-help.html" target="_blank">Help</a>
<br/>
</div>
<div id="container">
<div class="left">
<div>
<h2>Local Preview</h2>
<video width="320" height="240" id="local-view" autoplay="autoplay"
muted></video><br/>
</div>
<div>
<div>
Size: <div id="local-view-size" class="inline-contents"></div>
<div id="local-view-stream-size" class="inline-contents">(stream
size: N/A)</div><br/>
</div>
<div>
Resize: <button onclick="updateVideoTagSize('local-view')"> To
stream size</button>
<button onclick="updateVideoTagSize('local-view', 320, 240);">
320x240</button>
<button onclick="updateVideoTagSize('local-view', 640, 480);">
640x480</button>
</div>
</div>
<h2>Send on data channel</h2>
<input type="text" id="data-channel-send" size="10" />
<button onclick="sendDataFromHere();">Send data</button><br>
<h2>Settings</h2>
Server [<a href="" onclick="showServerHelp();">?</a>]:
<input type="text" id="pc-server" size="30"
value="http://localhost:8888"/>
Peer ID: <input type="text" id="peer-id" size="10" />
<button class="button-green" id="connect" onclick="connectFromHere();">
Connect</button><br/>
PeerConnection Constraints:
CPU overuse <input type="checkbox" id="cpuoveruse-detection"
onclick="setPeerConnectionConstraints();" checked="true"/>
RTP <input type="checkbox" id="data-channel-type-rtp"
onclick="setPeerConnectionConstraints();"><br/>
<input class="width-100" type="text" id="pc-constraints" value="{}" ><br/>
PeerConnection <a href="http://goo.gl/xVi12">
createOffer MediaConstraints:</a><br/>
<input type="text" class="width-100" id="pc-createoffer-constraints"
value="{}"/><br/>
PeerConnection <a href="http://goo.gl/0TjfX">
createAnswer MediaConstraints:</a><br/>
<input type="text" class="width-100" id="pc-createanswer-constraints"
value="{}"/><br/>
Call:
<button class="button-green" onclick="negotiateCallFromHere();">Negotiate
</button>
<button class="button-red" onclick="hangUpFromHere();">Hang up</button>
<br/>
Local Stream:
<button class="button-green" onclick="addLocalStreamFromHere();">Add
</button>
<button class="button-red" onclick="removeLocalStreamFromHere();">Remove
</button>
<button class="button-red" onclick="stopLocalFromHere();">Stop</button>
<button onclick="toggleLocalVideoFromHere();">Toggle Video</button>
<button onclick="toggleLocalAudioFromHere();">Toggle Audio</button><br/>
Remote Stream:
<button onclick="toggleRemoteVideoFromHere();">Toggle Video</button>
<button onclick="toggleRemoteAudioFromHere();">Toggle Audio</button><br/>
Data Channel:
<button onclick="createDataChannelFromHere();">Create</button>
<button onclick="closeDataChannelFromHere();">Close</button>
status:
<input type="text" id="data-channel-status" size="10" value="not created"
disabled="true"/>
ID:
<input type="text" id="data-channel-id" size="10" disabled="true"/><br/>
DTMF Sender:
<button onclick="createDtmfSenderFromHere();">Create</button>
tones:
<input type="text" id="dtmf-tones" size="10" value="123,abc" />
duration(ms):
<input type="text" id="dtmf-tones-duration" size="10" value="100" />
gap(ms):
<input type="text" id="dtmf-tones-gap" size="10" value="50" />
<button onclick="insertDtmfFromHere();">Send</button><br/>
Options:
<input type="checkbox" id="force-isac" onclick="forceIsacChanged();"/>
Force iSAC in Outgoing SDP<br/>
<button onclick="clearLog();">Clear Logs</button>
</div>
<div class="right">
<div>
<h2>Remote Video</h2>
<video width="320" height="240" id="remote-view" autoplay="autoplay">
</video><br/>
</div>
<div>
<div>
Size: <div id="remote-view-size" class="inline-contents"></div>
<div id="remote-view-stream-size" class="inline-contents">(stream size
:N/A)</div><br/>
</div>
<div>
Resize: <button onclick="updateVideoTagSize('remote-view')"> To
stream size</button>
<button onclick="updateVideoTagSize('remote-view', 320, 240);">
320x240</button>
<button onclick="updateVideoTagSize('remote-view', 640, 480);">
640x480</button>
</div>
</div>
<h2>Received on data channel</h2>
<textarea id="data-channel-receive" rows="7" cols="40" disabled="true">
</textarea>
<h2>Sent DTMF tones</h2>
<textarea id="dtmf-tones-sent" rows="7" cols="40" disabled="true">
</textarea>
</div>
<div id="footer">
<div class="left">
<h2>Messages</h2>
<pre id="messages"></pre>
</div>
<div class="right">
<h2>Debug</h2>
<pre id="debug"></pre>
</div>
</div>
</div>
</div>
</body>
</html> </html>
/**
* Copyright 2014 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
/**
* See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more
* information on getUserMedia. See
* http://dev.w3.org/2011/webrtc/editor/webrtc.html for more information on
* peerconnection and webrtc in general.
*/
/** TODO(jansson) give it a better name
 * Global namespace object.
 */
var global = {};
/**
 * We need a STUN server for some API calls.
 * @private
 */
var STUN_SERVER = 'stun.l.google.com:19302';
/** @private Hook that may rewrite outgoing SDP; identity by default. */
global.transformOutgoingSdp = function(sdp) { return sdp; };
/** @private Called with data channel status changes; no-op by default. */
global.dataStatusCallback = function(status) {};
/** @private Called with data received on the data channel; no-op default. */
global.dataCallback = function(data) {};
/** @private Called on each DTMF tone change; no-op by default. */
global.dtmfOnToneChange = function(tone) {};
/**
 * Used as a shortcut for finding DOM elements by ID. Declared with var: the
 * previous bare assignment created an implicit global, which throws in
 * strict mode.
 * @param {string} id A case-sensitive string representing the unique ID of
 *     the element being sought.
 * @return {?Element} The matching element, or null if not found.
 */
var $ = function(id) {
  return document.getElementById(id);
};
/**
 * Page initialization: hooks up data channel and DTMF callbacks, prepopulates
 * the getUserMedia constraints in the UI, registers fields for local storage,
 * starts accepting incoming calls and sets the peer connection constraints.
 * Optionally enumerates media devices on load.
 */
window.onload = function() {
  hookupDataChannelCallbacks_();
  hookupDtmfSenderCallback_();
  updateGetUserMediaConstraints();
  setupLocalStorageFieldValues();
  acceptIncomingCalls();
  setPeerConnectionConstraints();
  // .checked is already a boolean, so no loose comparison against true is
  // needed (and == is best avoided in favor of ===).
  if ($('get-devices-onload').checked) {
    getDevices();
  }
};
/**
 * Disconnect before the tab is closed.
 * (Disconnecting from the peerconnection server is automatic on page close;
 * hanging up a call is not -- see peerconnection-help.html.)
 */
window.onbeforeunload = function() {
disconnect_();
};
/** TODO (jansson) Fix the event assignment to allow the elements to have more
 * than one event assigned to it (currently replaces existing events).
 * Registers the listed input fields for local storage so their values
 * persist across page loads.
 */
function setupLocalStorageFieldValues() {
registerLocalStorage_('pc-server');
registerLocalStorage_('pc-createanswer-constraints');
registerLocalStorage_('pc-createoffer-constraints');
registerLocalStorage_('get-devices-onload');
}
// Public HTML functions
// The *Here functions are called from peerconnection.html and will make calls
// into our underlying JavaScript library with the values from the page
// (have to be named differently to avoid name clashes with existing functions).
/**
 * Requests user media with the MediaStreamConstraints JSON currently in the
 * getusermedia-constraints text area; prints any synchronous exception to
 * the messages area instead of letting it propagate.
 */
function getUserMediaFromHere() {
var constraints = $('getusermedia-constraints').value;
try {
doGetUserMedia_(constraints);
} catch (exception) {
print_('getUserMedia says: ' + exception);
}
}
/**
 * Connects to the signaling server using the server URL and peer id from the
 * UI, generating a random peer name first if the id field is empty.
 */
function connectFromHere() {
  var server = $('pc-server').value;
  // Strict comparison; the field value is always a string.
  if ($('peer-id').value === '') {
    // Generate a random name to distinguish us from other tabs:
    $('peer-id').value = 'peer_' + Math.floor(Math.random() * 10000);
    print_('Our name from now on will be ' + $('peer-id').value);
  }
  connect(server, $('peer-id').value);
}
/**
 * Reads the createOffer/createAnswer constraints from the UI, makes sure a
 * peer connection exists, and starts call negotiation.
 * NOTE(review): getEvaluatedJavaScript_ presumably eval()s the user-provided
 * text (see the [experimental] note in the help page) -- acceptable only
 * because this is a local manual test page.
 */
function negotiateCallFromHere() {
// Set the global variables with values from our UI.
setCreateOfferConstraints(getEvaluatedJavaScript_(
$('pc-createoffer-constraints').value));
setCreateAnswerConstraints(getEvaluatedJavaScript_(
$('pc-createanswer-constraints').value));
ensureHasPeerConnection_();
negotiateCall_();
}
// Adds the current local stream to the peer connection (creating the
// connection first if needed).
function addLocalStreamFromHere() {
ensureHasPeerConnection_();
addLocalStream();
}
// Removes the current local stream from the peer connection.
function removeLocalStreamFromHere() {
removeLocalStream();
}
// Hangs up the current call and goes back to accepting incoming calls.
function hangUpFromHere() {
hangUp();
acceptIncomingCalls();
}
// Toggles the .enabled flag of the remote stream's first video track.
function toggleRemoteVideoFromHere() {
toggleRemoteStream(function(remoteStream) {
return remoteStream.getVideoTracks()[0];
}, 'video');
}
// Toggles the .enabled flag of the remote stream's first audio track.
function toggleRemoteAudioFromHere() {
toggleRemoteStream(function(remoteStream) {
return remoteStream.getAudioTracks()[0];
}, 'audio');
}
// Toggles the .enabled flag of the local stream's first video track.
function toggleLocalVideoFromHere() {
toggleLocalStream(function(localStream) {
return localStream.getVideoTracks()[0];
}, 'video');
}
// Toggles the .enabled flag of the local stream's first audio track.
function toggleLocalAudioFromHere() {
toggleLocalStream(function(localStream) {
return localStream.getAudioTracks()[0];
}, 'audio');
}
// Stops the local media stream (it will no longer send any data).
function stopLocalFromHere() {
stopLocalStream();
}
// Creates a data channel (creating the peer connection first if needed).
function createDataChannelFromHere() {
ensureHasPeerConnection_();
createDataChannelOnPeerConnection();
}
// Closes the data channel.
function closeDataChannelFromHere() {
ensureHasPeerConnection_();
closeDataChannelOnPeerConnection();
}
// Sends the contents of the data-channel-send field on the data channel.
function sendDataFromHere() {
var data = $('data-channel-send').value;
sendDataOnChannel(data);
}
// Creates a DTMF sender (creating the peer connection first if needed).
function createDtmfSenderFromHere() {
ensureHasPeerConnection_();
createDtmfSenderOnPeerConnection();
}
// Sends the DTMF tones configured in the UI (tones, duration and gap).
function insertDtmfFromHere() {
var tones = $('dtmf-tones').value;
var duration = $('dtmf-tones-duration').value;
var gap = $('dtmf-tones-gap').value;
insertDtmfOnSender(tones, duration, gap);
}
/**
 * Checkbox handler: installs the iSAC-forcing SDP transform when checked,
 * otherwise restores the identity transform.
 */
function forceIsacChanged() {
  if ($('force-isac').checked) {
    forceIsac_();
  } else {
    dontTouchSdp_();
  }
}
/**
 * Updates the constraints in the getusermedia-constraints text box with a
 * MediaStreamConstraints string. This string is created based on the state
 * of the 'audio' and 'video' checkboxes.
 * If device enumeration is supported and device source id's are not null they
 * will be added to the constraints string.
 */
function updateGetUserMediaConstraints() {
  var selectedAudioDevice = $('audiosrc');
  var selectedVideoDevice = $('videosrc');
  var constraints = {audio: $('audio').checked,
                     video: $('video').checked
                    };
  if ($('video').checked) {
    // Default optional constraints placed here.
    constraints.video = {optional: [{minWidth: $('video-width').value},
                                    {minHeight: $('video-height').value},
                                    {googLeakyBucket: true}]};
  }
  // Bug fix: the original tested selectedAudioDevice.disabled twice, so the
  // video selector's disabled state was never consulted.
  if (!selectedAudioDevice.disabled && !selectedVideoDevice.disabled) {
    var devices = getSourcesFromField_(selectedAudioDevice,
                                       selectedVideoDevice);
    if ($('audio').checked) {
      if (devices.audioId != null)
        constraints.audio = {optional: [{sourceId: devices.audioId}]};
    }
    if ($('video').checked) {
      if (devices.videoId != null)
        constraints.video.optional.push({sourceId: devices.videoId});
    }
  }
  if ($('screencapture').checked) {
    // Screen capture replaces the constraints built above. Reassign rather
    // than redeclare (the original's inner `var constraints` shadowed the
    // outer declaration via hoisting, which was confusing but equivalent).
    constraints = {
      audio: $('audio').checked,
      video: {mandatory: {chromeMediaSource: 'screen',
                          maxWidth: screen.width,
                          maxHeight: screen.height}}
    };
    if ($('audio').checked)
      warning_('Audio for screencapture is not implemented yet, please ' +
               'try to set audio = false prior requesting screencapture');
  }
  $('getusermedia-constraints').value = JSON.stringify(constraints, null, ' ');
}
/** Pops up instructions on how to build and run peerconnection_server. */
function showServerHelp() {
  var helpText =
      'You need to build and run a peerconnection_server on some ' +
      'suitable machine. To build it in chrome, just run make/ninja ' +
      'peerconnection_server. Otherwise, read in https://code.google' +
      '.com/searchframe#xSWYf0NTG_Q/trunk/peerconnection/README&q=REA' +
      'DME%20package:webrtc%5C.googlecode%5C.com.';
  alert(helpText);
}
/** Empties both the messages and the debug log areas on the page. */
function clearLog() {
  var logIds = ['messages', 'debug'];
  for (var i = 0; i < logIds.length; i++) {
    $(logIds[i]).innerHTML = '';
  }
}
/**
 * Stops the local stream. Reports an error (which throws) if media access
 * was never granted.
 */
function stopLocalStream() {
  if (global.localStream == null) {
    error_('Tried to stop local stream, ' +
           'but media access is not granted.');
  }
  global.localStream.stop();
}
/**
 * Adds the current local media stream to a peer connection, applying the
 * configured add-stream constraints.
 * @param {RTCPeerConnection} peerConnection
 */
function addLocalStreamToPeerConnection(peerConnection) {
  if (global.localStream == null) {
    error_('Tried to add local stream to peer connection, but there is no ' +
           'stream yet.');
  }
  try {
    peerConnection.addStream(global.localStream, global.addStreamConstraints);
  } catch (exception) {
    error_('Failed to add stream with constraints ' +
           global.addStreamConstraints + ': ' + exception);
  }
  print_('Added local stream.');
}
/**
 * Removes the local stream from the peer connection.
 * @param {RTCPeerConnection} peerConnection
 */
function removeLocalStreamFromPeerConnection(peerConnection) {
  if (global.localStream == null) {
    error_('Tried to remove local stream from peer connection, but there is ' +
           'no stream yet.');
  }
  try {
    peerConnection.removeStream(global.localStream);
  } catch (exception) {
    error_('Could not remove stream: ' + exception);
  }
  print_('Removed local stream.');
}
/**
 * Enumerates the audio and video devices available in Chrome and adds the
 * devices to the HTML elements with Id 'audiosrc' and 'videosrc'.
 * Checks if device enumeration is supported and if the 'audiosrc' + 'videosrc'
 * elements exists, if not a debug printout will be displayed.
 * If the device label is empty, the device id will be used to populate the
 * name. Also makes sure the children has been loaded in order to update the
 * constraints.
 */
function getDevices() {
  // Bug fix: these were implicit globals in the original (missing `var`).
  var selectedAudioDevice = $('audiosrc');
  var selectedVideoDevice = $('videosrc');
  selectedAudioDevice.innerHTML = '';
  selectedVideoDevice.innerHTML = '';
  try {
    // Feature probe: throws if MediaStreamTrack.getSources is unavailable.
    // (The original pointlessly wrapped this call in eval().)
    MediaStreamTrack.getSources(function() {});
  } catch (exception) {
    selectedAudioDevice.disabled = true;
    selectedVideoDevice.disabled = true;
    $('get-devices').disabled = true;
    $('get-devices-onload').disabled = true;
    updateGetUserMediaConstraints();
    // error_ throws, so enumeration below is not attempted.
    error_('Device enumeration not supported. ' + exception);
  }
  MediaStreamTrack.getSources(function(devices) {
    for (var i = 0; i < devices.length; i++) {
      var option = document.createElement('option');
      option.value = devices[i].id;
      option.text = devices[i].label;
      if (devices[i].kind == 'audio') {
        if (option.text == '') {
          option.text = devices[i].id;
        }
        selectedAudioDevice.appendChild(option);
      } else if (devices[i].kind == 'video') {
        if (option.text == '') {
          option.text = devices[i].id;
        }
        selectedVideoDevice.appendChild(option);
      } else {
        error_('Device type ' + devices[i].kind + ' not recognized, ' +
               'cannot enumerate device. Currently only device types' +
               '\'audio\' and \'video\' are supported');
        updateGetUserMediaConstraints();
        return;
      }
    }
  });
  checkIfDeviceDropdownsArePopulated_();
}
/**
 * Sets the transform applied just before the local description is set and
 * sent to the peer.
 * @param {function} transformFunction Takes one SDP string as argument and
 *     returns the modified SDP string.
 */
function setOutgoingSdpTransform(transformFunction) {
  global.transformOutgoingSdp = transformFunction;
}
/**
 * Stores the MediaConstraints to be used for PeerConnection createAnswer()
 * calls.
 * @param {string} mediaConstraints The constraints, as defined in the
 *     PeerConnection JS API spec.
 */
function setCreateAnswerConstraints(mediaConstraints) {
  global.createAnswerConstraints = mediaConstraints;
}
/**
 * Stores the MediaConstraints to be used for PeerConnection createOffer()
 * calls.
 * @param {string} mediaConstraints The constraints, as defined in the
 *     PeerConnection JS API spec.
 */
function setCreateOfferConstraints(mediaConstraints) {
  global.createOfferConstraints = mediaConstraints;
}
/**
 * Registers the callback functions that will receive DataChannel readyState
 * updates and received data.
 * @param {function} status_callback Receives a string with the current
 *     DataChannel readyState.
 * @param {function} data_callback Receives data sent by the remote peer.
 */
function setDataCallbacks(status_callback, data_callback) {
  global.dataStatusCallback = status_callback;
  global.dataCallback = data_callback;
}
/**
 * Sends data on an active DataChannel; errors out if none exists.
 * @param {string} data The string that will be sent to the remote peer.
 */
function sendDataOnChannel(data) {
  if (global.dataChannel == null) {
    error_('Trying to send data, but there is no DataChannel.');
  }
  global.dataChannel.send(data);
}
/**
 * Registers the callback that will receive DTMF sender ontonechange events.
 * @param {function} ontonechange Receives a string with the tone that has
 *     just begun playout.
 */
function setOnToneChange(ontonechange) {
  global.dtmfOnToneChange = ontonechange;
}
/**
 * Inserts DTMF tones on an active DTMF sender; errors out if none exists.
 * @param {string} tones to be sent.
 * @param {string} duration duration of the tones to be sent.
 * @param {string} interToneGap gap between the tones to be sent.
 */
function insertDtmf(tones, duration, interToneGap) {
  if (global.dtmfSender == null) {
    error_('Trying to send DTMF, but there is no DTMF sender.');
  }
  global.dtmfSender.insertDTMF(tones, duration, interToneGap);
}
/**
 * Dispatches a signaling message from the peer: SDP descriptions are applied
 * as the remote description (answering if it was an offer) and ICE candidates
 * are added to the connection. Unknown messages are reported as errors.
 */
function handleMessage(peerConnection, message) {
  var parsedMsg = JSON.parse(message);
  if (parsedMsg.type) {
    var sessionDescription = new RTCSessionDescription(parsedMsg);
    peerConnection.setRemoteDescription(
        sessionDescription,
        function() { success_('setRemoteDescription'); },
        function(error) { error_('setRemoteDescription', error); });
    if (sessionDescription.type == 'offer') {
      print_('createAnswer with constraints: ' +
             JSON.stringify(global.createAnswerConstraints, null, ' '));
      peerConnection.createAnswer(
          setLocalAndSendMessage_,
          function(error) { error_('createAnswer', error); },
          global.createAnswerConstraints);
    }
  } else if (parsedMsg.candidate) {
    var candidate = new RTCIceCandidate(parsedMsg);
    peerConnection.addIceCandidate(candidate,
        function() { success_('addIceCandidate'); },
        function(error) { error_('addIceCandidate', error); }
    );
  } else {
    error_('unknown message received');
  }
}
/**
 * Sets the peerConnection constraints based on checkboxes.
 * TODO (jansson) Make it possible to use the text field for constraints like
 * for getUserMedia.
 */
function setPeerConnectionConstraints() {
  // Only optional constraints are supported for now.
  global.pcConstraints = {
    optional: [
      {googCpuOveruseDetection: $('cpuoveruse-detection').checked},
      {RtpDataChannels: $('data-channel-type-rtp').checked}
    ]
  };
  $('pc-constraints').value = JSON.stringify(global.pcConstraints, null, ' ');
}
/**
 * Creates an RTCPeerConnection configured with the given STUN server and the
 * globally configured constraints, and hooks up the stream/ICE/data-channel
 * callbacks.
 * @param {string} stun_server host[:port] of the STUN server to use.
 * @return {RTCPeerConnection} The new connection.
 */
function createPeerConnection(stun_server) {
  // Bug fix: `servers` and `peerConnection` were implicit globals in the
  // original (missing `var`).
  var servers = {iceServers: [{url: 'stun:' + stun_server}]};
  var peerConnection;
  try {
    peerConnection = new RTCPeerConnection(servers, global.pcConstraints);
  } catch (exception) {
    error_('Failed to create peer connection: ' + exception);
  }
  peerConnection.onaddstream = addStreamCallback_;
  peerConnection.onremovestream = removeStreamCallback_;
  peerConnection.onicecandidate = iceCallback_;
  peerConnection.ondatachannel = onCreateDataChannelCallback_;
  return peerConnection;
}
/** Starts negotiation by creating an offer with the configured constraints. */
function setupCall(peerConnection) {
  print_('createOffer with constraints: ' +
         JSON.stringify(global.createOfferConstraints, null, ' '));
  var onFailure = function(error) { error_('createOffer', error); };
  peerConnection.createOffer(setLocalAndSendMessage_,
                             onFailure,
                             global.createOfferConstraints);
}
/** Answers an incoming call by processing the offer message. */
function answerCall(peerConnection, message) {
  handleMessage(peerConnection, message);
}
/**
 * Creates an unreliable data channel with the given label on the connection
 * and hooks up its event handlers; errors out if one is already open.
 */
function createDataChannel(peerConnection, label) {
  var existing = global.dataChannel;
  if (existing != null && existing.readyState != 'closed') {
    error_('Creating DataChannel, but we already have one.');
  }
  global.dataChannel =
      peerConnection.createDataChannel(label, { reliable: false });
  print_('DataChannel with label ' + global.dataChannel.label + ' initiated ' +
         'locally.');
  hookupDataChannelEvents();
}
/** Closes the current data channel; errors out if none exists. */
function closeDataChannel(peerConnection) {
  if (global.dataChannel == null) {
    error_('Closing DataChannel, but none exists.');
  }
  print_('DataChannel with label ' + global.dataChannel.label +
         ' is beeing closed.');
  global.dataChannel.close();
}
/**
 * Creates a DTMF sender for the first local audio track; errors out if a
 * sender already exists or there is no local stream.
 * @param {RTCPeerConnection} peerConnection
 */
function createDtmfSender(peerConnection) {
  if (global.dtmfSender != null)
    error_('Creating DTMF sender, but we already have one.');
  var localStream = global.localStream;
  if (localStream == null)
    error_('Creating DTMF sender but local stream is null.');
  // Bug fix: this was an implicit global in the original (missing `var`).
  var localAudioTrack = localStream.getAudioTracks()[0];
  global.dtmfSender = peerConnection.createDTMFSender(localAudioTrack);
  global.dtmfSender.ontonechange = global.dtmfOnToneChange;
}
/**
 * Connects to the provided peerconnection_server.
 *
 * @param {string} serverUrl The server URL in string form without an ending
 *     slash, something like http://localhost:8888.
 * @param {string} clientName The name to use when connecting to the server.
 */
function connect(serverUrl, clientName) {
  if (global.ourPeerId != null)
    error_('connecting, but is already connected.');
  print_('Connecting to ' + serverUrl + ' as ' + clientName);
  global.serverUrl = serverUrl;
  global.ourClientName = clientName;
  // Bug fix: `request` was an implicit global in the original (missing
  // `var`), which would break with concurrent requests.
  var request = new XMLHttpRequest();
  request.open('GET', serverUrl + '/sign_in?' + clientName, true);
  print_(serverUrl + '/sign_in?' + clientName);
  request.onreadystatechange = function() {
    connectCallback_(request);
  };
  request.send();
}
/**
 * Checks if the remote peer has connected. Prints peer-connected if that is
 * the case, otherwise no-peer-connected.
 */
function remotePeerIsConnected() {
  var status = (global.remotePeerId == null) ? 'no-peer-connected'
                                             : 'peer-connected';
  print_(status);
}
/**
 * Creates a peer connection. Must be called before most other public
 * functions in this file; errors out if one already exists.
 */
function preparePeerConnection() {
  if (global.peerConnection != null) {
    error_('creating peer connection, but we already have one.');
  }
  global.peerConnection = createPeerConnection(STUN_SERVER);
  success_('ok-peerconnection-created');
}
/**
 * Adds the local stream to the peer connection. You will have to
 * re-negotiate the call for this to take effect in the call.
 */
function addLocalStream() {
  if (global.peerConnection == null) {
    error_('adding local stream, but we have no peer connection.');
  }
  addLocalStreamToPeerConnection(global.peerConnection);
  print_('ok-added');
}
/**
 * Removes the local stream from the peer connection. You will have to
 * re-negotiate the call for this to take effect in the call.
 */
function removeLocalStream() {
  if (global.peerConnection == null) {
    error_('attempting to remove local stream, but no call is up');
  }
  removeLocalStreamFromPeerConnection(global.peerConnection);
  print_('ok-local-stream-removed');
}
/** Prints the peer connection's ready state (see getReadyState_). */
function getPeerConnectionReadyState() {
  print_(getReadyState_());
}
/**
 * Toggles a remote track's enabled state on the peer connection, given that
 * a call is active. Prints ok-[typeToToggle]-toggled-to-[true/false] on
 * success.
 *
 * @param {function} selectAudioOrVideoTrack Takes a remote stream as argument
 *     and returns a track (e.g. either the video or audio track).
 * @param {function} typeToToggle Either "audio" or "video" depending on what
 *     the selector function selects.
 */
function toggleRemoteStream(selectAudioOrVideoTrack, typeToToggle) {
  if (global.peerConnection == null) {
    error_('Tried to toggle remote stream, but have no peer connection.');
  }
  var remoteStreams = global.peerConnection.getRemoteStreams();
  if (remoteStreams.length == 0) {
    error_('Tried to toggle remote stream, but not receiving any stream.');
  }
  toggle_(selectAudioOrVideoTrack(remoteStreams[0]), 'remote', typeToToggle);
}
/**
 * See documentation on toggleRemoteStream (this function is the same except
 * we are looking at local streams).
 */
function toggleLocalStream(selectAudioOrVideoTrack, typeToToggle) {
  if (global.peerConnection == null) {
    error_('Tried to toggle local stream, but have no peer connection.');
  }
  var localStreams = global.peerConnection.getLocalStreams();
  if (localStreams.length == 0) {
    error_('Tried to toggle local stream, but there is no local stream in ' +
           'the call.');
  }
  toggle_(selectAudioOrVideoTrack(localStreams[0]), 'local', typeToToggle);
}
/**
 * Hangs up a started call. Prints ok-call-hung-up on success. This tab will
 * not accept any incoming calls after this call.
 */
function hangUp() {
  if (global.peerConnection == null) {
    error_('hanging up, but has no peer connection');
  }
  if (getReadyState_() != 'active') {
    error_('hanging up, but ready state is not active (no call up).');
  }
  sendToPeer(global.remotePeerId, 'BYE');
  closeCall_();
  global.acceptsIncomingCalls = false;
  print_('ok-call-hung-up');
}
/** Start accepting incoming calls again. */
function acceptIncomingCalls() {
  global.acceptsIncomingCalls = true;
}
/**
 * Creates a DataChannel on the current PeerConnection. Only one DataChannel
 * can be created on each PeerConnection.
 * Prints ok-datachannel-created on success.
 */
function createDataChannelOnPeerConnection() {
  if (global.peerConnection == null) {
    error_('Tried to create data channel, but have no peer connection.');
  }
  createDataChannel(global.peerConnection, global.ourClientName);
  print_('ok-datachannel-created');
}
/**
 * Closes the DataChannel on the current PeerConnection.
 * Prints ok-datachannel-close on success.
 */
function closeDataChannelOnPeerConnection() {
  if (global.peerConnection == null) {
    error_('Tried to close data channel, but have no peer connection.');
  }
  closeDataChannel(global.peerConnection);
  print_('ok-datachannel-close');
}
/**
 * Creates a DTMF sender on the current PeerConnection.
 * Prints ok-dtmfsender-created on success.
 */
function createDtmfSenderOnPeerConnection() {
  if (global.peerConnection == null) {
    error_('Tried to create DTMF sender, but have no peer connection.');
  }
  createDtmfSender(global.peerConnection);
  print_('ok-dtmfsender-created');
}
/**
 * Sends DTMF tones on the global.dtmfSender.
 * Prints ok-dtmf-sent on success.
 */
function insertDtmfOnSender(tones, duration, interToneGap) {
  if (global.dtmfSender == null) {
    error_('Tried to insert DTMF tones, but have no DTMF sender.');
  }
  insertDtmf(tones, duration, interToneGap);
  print_('ok-dtmf-sent');
}
/**
 * Sends a message to a peer through the peerconnection_server (synchronous
 * POST to the server's /message endpoint).
 */
function sendToPeer(peer, message) {
  var messageToLog = message.sdp ? message.sdp : message;
  print_('Sending message ' + messageToLog + ' to peer ' + peer + '.');
  var url = global.serverUrl + '/message?peer_id=' + global.ourPeerId + '&to=' +
            peer;
  var request = new XMLHttpRequest();
  request.open('POST', url, false);
  request.setRequestHeader('Content-Type', 'text/plain');
  request.send(message);
}
/**
 * @param {!string} videoTagId The ID of the video tag to update.
 * @param {!number} width of the video to update the video tag, if width or
 *     height is 0, size will be taken from videoTag.videoWidth.
 * @param {!number} height of the video to update the video tag, if width or
 *     height is 0 size will be taken from the videoTag.videoHeight.
 */
function updateVideoTagSize(videoTagId, width, height) {
  var videoTag = $(videoTagId);
  if (width > 0 || height > 0) {
    // Explicit size requested.
    videoTag.width = width;
    videoTag.height = height;
  } else if (videoTag.videoWidth > 0 || videoTag.videoHeight > 0) {
    // Fall back to the stream's native size.
    videoTag.width = videoTag.videoWidth;
    videoTag.height = videoTag.videoHeight;
    print_('Set video tag "' + videoTagId + '" size to ' + videoTag.width +
           'x' + videoTag.height);
  } else {
    print_('"' + videoTagId + '" video stream size is 0, skipping resize');
  }
  displayVideoSize_(videoTag);
}
// Internals.
/**
 * Disconnects from the peerconnection server (synchronous /sign_out request).
 * Prints ok-disconnected on success; no-op when not connected.
 */
function disconnect_() {
  if (global.ourPeerId == null)
    return;
  // Bug fix: `request` was an implicit global in the original (missing
  // `var`).
  var request = new XMLHttpRequest();
  request.open('GET', global.serverUrl + '/sign_out?peer_id=' +
               global.ourPeerId, false);
  request.send();
  global.ourPeerId = null;
  print_('ok-disconnected');
}
/**
 * @return {boolean} True if we are disconnected from peerconnection_server.
 */
function isDisconnected_() {
  return global.ourPeerId == null;
}
/**
 * @private
 * @return {!string} The current peer connection's ready state, or
 *     'no-peer-connection' if there is no peer connection up.
 *
 * NOTE: The PeerConnection states are changing and until chromium has
 * implemented the new states we have to use this interim solution of
 * always assuming that the PeerConnection is 'active'.
 */
function getReadyState_() {
  return (global.peerConnection == null) ? 'no-peer-connection' : 'active';
}
/**
 * This function asks permission to use the webcam and mic from the browser.
 * It will print ok-requested to the test. This does not mean the request was
 * approved though. The test will then have to click past the dialog that
 * appears in Chrome, which will run either the OK or failed callback as a
 * result. To see which callback was called, use obtainGetUserMediaResult_().
 * @private
 * @param {string} constraints Defines what to be requested, with mandatory
 *     and optional constraints defined. The contents of this parameter depends
 *     on the WebRTC version. This should be JavaScript code that we eval().
 */
function doGetUserMedia_(constraints) {
  if (!getUserMedia) {
    print_('Browser does not support WebRTC.');
    return;
  }
  var evaluatedConstraints;
  try {
    // This is a manual test page: the constraints come from a text box the
    // operator fills with a JavaScript expression, hence the eval().
    eval('evaluatedConstraints = ' + constraints);
  } catch (exception) {
    error_('Not valid JavaScript expression: ' + constraints);
  }
  print_('Requesting doGetUserMedia: constraints: ' + constraints);
  getUserMedia(evaluatedConstraints, getUserMediaOkCallback_,
               getUserMediaFailedCallback_);
}
/**
 * Must be called after calling doGetUserMedia.
 * @private
 * @return {string} Returns ' not called yet' if we have not yet been called
 *     back by WebRTC. Otherwise it returns either ok-got-stream or
 *     failed-with-error-x (where x is the error code from the error
 *     callback) depending on which callback got called by WebRTC.
 */
function obtainGetUserMediaResult_() {
  if (global.requestWebcamAndMicrophoneResult == null) {
    // No callback has fired yet; record and report the pending state.
    global.requestWebcamAndMicrophoneResult = ' not called yet';
  }
  return global.requestWebcamAndMicrophoneResult;
}
/**
 * Negotiates a call with the other side. This will create a peer connection
 * on the other side if there isn't one.
 *
 * To call this method we need to be aware of the other side, e.g. we must be
 * connected to peerconnection_server and we must have exactly one peer on
 * that server.
 *
 * This method may be called any number of times. If you haven't added any
 * streams to the call, an "empty" call will result. The method prints
 * ok-negotiating immediately if the negotiation was successfully sent.
 * @private
 */
function negotiateCall_() {
  if (global.peerConnection == null) {
    error_('Negotiating call, but we have no peer connection.');
  }
  if (global.ourPeerId == null) {
    error_('Negotiating call, but not connected.');
  }
  if (global.remotePeerId == null) {
    error_('Negotiating call, but missing remote peer.');
  }
  setupCall(global.peerConnection);
  print_('ok-negotiating');
}
/**
 * Extracts the selected source id from each of the provided drop down
 * elements. If the audioSelect or videoSelect objects do not have any
 * HTMLOptions children, the corresponding id is null.
 * @param {!object} audioSelect HTML drop down element with audio devices
 *     added as HTMLOptionsCollection children.
 * @param {!object} videoSelect HTML drop down element with video devices
 *     added as HTMLOptionsCollection children.
 * @return {!object} Contains audioId and videoId from the selected devices.
 * @private
 */
function getSourcesFromField_(audioSelect, videoSelect) {
  var selectedValue = function(select) {
    if (select.options.length > 0) {
      return select.options[select.selectedIndex].value;
    }
    return null;
  };
  return {
    audioId: selectedValue(audioSelect),
    videoId: selectedValue(videoSelect)
  };
}
/**
 * Failure callback for getUserMedia; reports the error name.
 * @private
 * @param {NavigatorUserMediaError} error Error containing details.
 */
function getUserMediaFailedCallback_(error) {
  error_('GetUserMedia failed with error: ' + error.name);
}
/**
 * Forwards gathered ICE candidates to the remote peer.
 * @private
 */
function iceCallback_(event) {
  if (!event.candidate) {
    return;
  }
  sendToPeer(global.remotePeerId, JSON.stringify(event.candidate));
}
/**
 * Applies the configured SDP transform, sets the local description and sends
 * it to the remote peer.
 * @private
 */
function setLocalAndSendMessage_(session_description) {
  session_description.sdp =
      global.transformOutgoingSdp(session_description.sdp);
  global.peerConnection.setLocalDescription(
      session_description,
      function() { success_('setLocalDescription'); },
      function(error) { error_('setLocalDescription', error); });
  print_('Sending SDP message:\n' + session_description.sdp);
  sendToPeer(global.remotePeerId, JSON.stringify(session_description));
}
/**
 * Attaches an incoming remote stream to the remote-view video tag.
 * @private
 */
function addStreamCallback_(event) {
  print_('Receiving remote stream...');
  var videoTag = document.getElementById('remote-view');
  attachMediaStream(videoTag, event.stream);
  // Capture-phase listener on window so we see the (non-bubbling)
  // loadedmetadata event from the video element.
  window.addEventListener('loadedmetadata', function() {
    displayVideoSize_(videoTag);
  }, true);
}
/**
 * Clears the remote-view video tag when the remote stream goes away.
 * @private
 */
function removeStreamCallback_(event) {
  print_('Call ended.');
  var remoteView = document.getElementById('remote-view');
  remoteView.src = '';
}
/**
 * Adopts a data channel initiated by the remote peer; errors out if we
 * already have an open one.
 * @private
 */
function onCreateDataChannelCallback_(event) {
  var current = global.dataChannel;
  if (current != null && current.readyState != 'closed') {
    error_('Received DataChannel, but we already have one.');
  }
  global.dataChannel = event.channel;
  print_('DataChannel with label ' + global.dataChannel.label +
         ' initiated by remote peer.');
  hookupDataChannelEvents();
}
/**
 * Wires the current data channel's events to the registered callbacks.
 * @private
 */
function hookupDataChannelEvents() {
  var channel = global.dataChannel;
  channel.onmessage = global.dataCallback;
  channel.onopen = onDataChannelReadyStateChange_;
  channel.onclose = onDataChannelReadyStateChange_;
  // Trigger global.dataStatusCallback so an application is notified
  // about the created data channel.
  onDataChannelReadyStateChange_();
}
/**
 * Reports data channel readyState changes to the status callback and mirrors
 * the channel id in the UI while the channel is open.
 * @private
 */
function onDataChannelReadyStateChange_() {
  var readyState = global.dataChannel.readyState;
  print_('DataChannel state:' + readyState);
  global.dataStatusCallback(readyState);
  // Display dataChannel.id only when dataChannel is active/open.
  if (readyState == 'open') {
    $('data-channel-id').value = global.dataChannel.id;
  } else if (readyState == 'closed') {
    $('data-channel-id').value = '';
  }
}
/**
 * Success callback for getUserMedia: stores the stream, shows local video and
 * installs track diagnostics handlers.
 * @private
 * @param {MediaStream} stream Media stream.
 */
function getUserMediaOkCallback_(stream) {
  global.localStream = stream;
  global.requestWebcamAndMicrophoneResult = 'ok-got-stream';
  success_('getUserMedia');
  if (stream.getVideoTracks().length > 0) {
    // Show the video tag if we did request video in the getUserMedia call.
    var videoTag = $('local-view');
    attachMediaStream(videoTag, stream);
    window.addEventListener('loadedmetadata', function() {
      displayVideoSize_(videoTag);
    }, true);
    var videoTrack = stream.getVideoTracks()[0];
    // Throw an error when no video is sent from camera but gUM returns OK.
    videoTrack.onended = function() {
      error_(global.localStream + ' getUserMedia successful but ' +
             'MediaStreamTrack.onended event fired, no frames from camera.');
    };
    // Print information on track going to mute or back from it.
    videoTrack.onmute = function() {
      error_(global.localStream + ' MediaStreamTrack.onmute event has fired, ' +
             'no frames to the track.');
    };
    videoTrack.onunmute = function() {
      warning_(global.localStream + ' MediaStreamTrack.onunmute event has ' +
               'fired.');
    };
  }
}
/**
 * Writes the given video tag's stream size and display size into the
 * corresponding '-stream-size' and '-size' divs, when those divs exist.
 * @private
 * @param {string} videoTag The video tag whose sizes should be displayed.
 */
function displayVideoSize_(videoTag) {
  var streamSizeDiv = $(videoTag.id + '-stream-size');
  var sizeDiv = $(videoTag.id + '-size');
  if (!streamSizeDiv || !sizeDiv) {
    print_('Skipping updating -stream-size and -size tags due to div\'s ' +
           'are missing');
    return;
  }
  if (videoTag.videoWidth > 0 || videoTag.videoHeight > 0) {
    streamSizeDiv.innerHTML = '(stream size: ' + videoTag.videoWidth + 'x' +
                              videoTag.videoHeight + ')';
    sizeDiv.innerHTML = videoTag.width + 'x' + videoTag.height;
  }
}
/**
 * Watches the 'audiosrc' and 'videosrc' drop down menus so that the
 * getUserMedia constraints text box is refreshed once their children have
 * been appended (providing device ID's on page load).
 * @private
 */
function checkIfDeviceDropdownsArePopulated_() {
  if (!document.addEventListener) {
    print_('addEventListener is not supported by your browser, cannot update ' +
           'device source ID\'s automatically. Select a device from the audio' +
           ' or video source drop down menu to update device source id\'s');
    return;
  }
  var dropdownIds = ['audiosrc', 'videosrc'];
  for (var i = 0; i < dropdownIds.length; i++) {
    $(dropdownIds[i]).addEventListener('DOMNodeInserted',
                                       updateGetUserMediaConstraints, false);
  }
}
/**
 * Register an input element to use local storage to remember its state
 * between sessions. Only input elements of type checkbox and text are
 * supported.
 * @private
 * @param {!string} element_id to be used as a key for local storage and the
 *     id of the element to store the state for.
 */
function registerLocalStorage_(element_id) {
  var element = $(element_id);
  if (element.tagName != 'INPUT') {
    error_('You can only use registerLocalStorage_ for input elements. ' +
           'Element \"' + element.tagName + '\" is not an input element. ');
  }
  if (localStorage.getItem(element.id) == null) {
    // First visit: seed local storage with the element's current state.
    storeLocalStorageField_(element);
  } else {
    getLocalStorageField_(element);
  }
  // Registers the appropriate events for input elements.
  if (element.type == 'checkbox') {
    element.onclick = function() { storeLocalStorageField_(this); };
  } else if (element.type == 'text') {
    element.onblur = function() { storeLocalStorageField_(this); };
  } else {
    // Bug fix: corrected 'Unsupportered' typo in the error message.
    error_('Unsupported input type: ' + '\"' + element.type + '\"');
  }
}
/**
 * Fetches the stored value from local storage and updates the element state.
 * @private
 * @param {!Object} element of which id is representing the key parameter for
 *     local storage.
 */
function getLocalStorageField_(element) {
  // Makes sure the checkbox status is matching the local storage value.
  if (element.type == 'checkbox') {
    element.checked = (localStorage.getItem(element.id) == 'true');
  } else if (element.type == 'text') {
    element.value = localStorage.getItem(element.id);
  } else {
    // Bug fix: corrected 'Unsupportered' typo in the error message.
    error_('Unsupported input type: ' + '\"' + element.type + '\"');
  }
}
/**
 * Stores the string value of the element object using local storage.
 * Unsupported input types are silently ignored.
 * @private
 * @param {!Object} element of which id is representing the key parameter for
 *     local storage.
 */
function storeLocalStorageField_(element) {
  var value = null;
  if (element.type == 'checkbox') {
    value = element.checked;
  } else if (element.type == 'text') {
    value = element.value;
  } else {
    return;
  }
  localStorage.setItem(element.id, value);
}
/**
 * Create the peer connection if none is up (this is just convenience to
 * avoid having a separate button for that).
 * @private
 */
function ensureHasPeerConnection_() {
  if (getReadyState_() != 'no-peer-connection') {
    return;
  }
  preparePeerConnection();
}
/**
 * Logs an informational message in black to the messages area.
 * @private
 * @param {string} message Text to print.
 */
function print_(message) {
  print_handler_(message, 'messages', 'black');
}
/**
 * Logs a success message in green to the messages area.
 * @private
 * @param {string} message Text to print.
 */
function success_(message) {
  print_handler_(message, 'messages', 'green');
}
/**
 * Logs a warning message in orange to the debug area.
 * @private
 * @param {string} message Text to print.
 */
function warning_(message) {
  print_handler_(message, 'debug', 'orange');
}
/**
 * Logs an error message in red to the debug area (and throws, see
 * print_handler_).
 * @private
 * @param {string} message Text to print.
 */
function error_(message) {
  print_handler_(message, 'debug', 'red');
}
/**
 * Appends a colored message to the given log area, mirrors it to the console
 * and, for red (error) messages, throws so execution stops.
 * @private
 * @param {string} message Text to print.
 * @param {string} textField Element ID of where to print.
 * @param {string} color Color of the text.
 */
function print_handler_(message, textField, color) {
  if (color == 'green')
    message += ' success';
  // Bug fix: the original relied on ASI here; terminate the statement
  // explicitly.
  $(textField).innerHTML += '<span style="color:' + color + ';">' + message +
                            '</span><br>';
  console.log(message);
  if (color == 'red')
    throw new Error(message);
}
/**
 * Evaluates a user-entered JavaScript expression (this is a manual test
 * page, so eval() of operator input is accepted here).
 * @private
 * @param {string} stringRepresentation JavaScript as a string.
 * @return {Object} The PeerConnection constraints as a JavaScript dictionary.
 */
function getEvaluatedJavaScript_(stringRepresentation) {
  var evaluatedJavaScript;
  try {
    eval('evaluatedJavaScript = ' + stringRepresentation);
    return evaluatedJavaScript;
  } catch (exception) {
    error_('Not valid JavaScript expression: ' + stringRepresentation);
  }
}
/**
 * Swaps two lines within a SDP message; if either line is absent the message
 * is returned unchanged.
 * @private
 * @param {string} sdp The full SDP message.
 * @param {string} line The line to swap with swapWith.
 * @param {string} swapWith The other line.
 * @return {string} The altered SDP message.
 */
function swapSdpLines_(sdp, line, swapWith) {
  var lines = sdp.split('\r\n');
  var firstIndex = lines.indexOf(line);
  var secondIndex = lines.indexOf(swapWith);
  if (firstIndex == -1 || secondIndex == -1) {
    // This generally happens on the first message.
    return sdp;
  }
  var saved = lines[firstIndex];
  lines[firstIndex] = lines[secondIndex];
  lines[secondIndex] = saved;
  return lines.join('\r\n');
}
/**
 * Installs an outgoing SDP transform that strips every audio codec except
 * iSAC (payload 104), leaving video codecs untouched.
 * @private
 */
function forceIsac_() {
  var isacOnlyTransform = function(sdp) {
    // Remove all other codecs (not the video codecs though).
    sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
                      'm=audio $1 RTP/SAVPF 104\r\n');
    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:104 minptime=10');
    sdp = sdp.replace(/a=rtpmap:(?!104)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
    return sdp;
  };
  setOutgoingSdpTransform(isacOnlyTransform);
}
/**
 * Installs the identity SDP transform (no modification).
 * @private
 */
function dontTouchSdp_() {
  var identityTransform = function(sdp) { return sdp; };
  setOutgoingSdpTransform(identityTransform);
}
/**
 * Routes data channel status updates and received data into the UI fields.
 * @private
 */
function hookupDataChannelCallbacks_() {
  var onStatus = function(status) {
    $('data-channel-status').value = status;
  };
  var onData = function(data_message) {
    print_('Received ' + data_message.data);
    $('data-channel-receive').value =
        data_message.data + '\n' + $('data-channel-receive').value;
  };
  setDataCallbacks(onStatus, onData);
}
/**
 * Routes DTMF tone-change events into the sent-tones UI field.
 * @private
 */
function hookupDtmfSenderCallback_() {
  var onTone = function(tone) {
    print_('Sent DTMF tone: ' + tone.tone);
    $('dtmf-tones-sent').value =
        tone.tone + '\n' + $('dtmf-tones-sent').value;
  };
  setOnToneChange(onTone);
}
/** @private */
function toggle_(track, localOrRemote, audioOrVideo) {
  if (!track) {
    // error_ is expected to abort by throwing - confirm with the helper.
    error_('Tried to toggle ' + localOrRemote + ' ' + audioOrVideo +
           ' stream, but has no such stream.');
  }
  var newState = !track.enabled;
  track.enabled = newState;
  print_('ok-' + audioOrVideo + '-toggled-to-' + newState);
}
/**
 * onreadystatechange handler for the sign-in request to peerconnection_server.
 * On success, stores our peer id (and the remote peer's, if one is already
 * connected) and starts the hanging GET.
 * @private
 * @param {XMLHttpRequest} request The sign-in request.
 */
function connectCallback_(request) {
  print_('Connect callback: ' + request.status + ', ' + request.readyState);
  // status is 0 for every readyState before headers arrive, so only inspect
  // it once the request is done (matches hangingGetCallback_'s guard).
  if (request.readyState != 4)
    return;
  if (request.status == 0) {
    print_('peerconnection_server doesn\'t seem to be up.');
    error_('failed connecting to peerConnection server');
  }
  if (request.status == 200) {
    global.ourPeerId = parseOurPeerId_(request.responseText);
    global.remotePeerId = parseRemotePeerIdIfConnected_(request.responseText);
    startHangingGet_(global.serverUrl, global.ourPeerId);
    print_('ok-connected');
  }
}
/**
 * Extracts our peer id from the sign-in response. The first response line is
 * 'name,id,...' according to peerconnection_server's protocol.
 * @private
 * @param {string} responseText Raw response body from the server.
 * @return {number} Our peer id.
 */
function parseOurPeerId_(responseText) {
  // According to peerconnection_server's protocol.
  var peerList = responseText.split('\n');
  // Radix 10: never rely on parseInt's legacy base guessing.
  return parseInt(peerList[0].split(',')[1], 10);
}
/**
 * Scans the sign-in response for an already-connected remote peer. Each
 * non-empty line is 'name,id,...'; the line matching our own id is skipped.
 * @private
 * @param {string} responseText Raw response body from the server.
 * @return {?number} The remote peer's id, or null if no peer has connected.
 */
function parseRemotePeerIdIfConnected_(responseText) {
  var peerList = responseText.split('\n');
  if (peerList.length == 1) {
    // No peers have connected yet - we'll get their id later in a notification.
    return null;
  }
  var remotePeerId = null;
  for (var i = 0; i < peerList.length; i++) {
    if (peerList[i].length == 0)
      continue;
    var parsed = peerList[i].split(',');
    var name = parsed[0];
    // Parse to a number so this agrees with handleServerNotification_, which
    // stores parseInt(parsed[1]) into global.remotePeerId.
    var id = parseInt(parsed[1], 10);
    if (id != global.ourPeerId) {
      print_('Found remote peer with name ' + name + ', id ' +
             id + ' when connecting.');
      // There should be at most one remote peer in this test.
      if (remotePeerId != null)
        error_('Expected just one remote peer in this test: ' +
               'found several.');
      // Found a remote peer.
      remotePeerId = id;
    }
  }
  return remotePeerId;
}
/**
 * Issues a hanging GET to the server's /wait endpoint for our peer id; the
 * response is handled by hangingGetCallback_ / hangingGetTimeoutCallback_.
 * Does nothing if we are disconnected.
 * @private
 * @param {string} server Base URL of peerconnection_server.
 * @param {number} ourId Our peer id as assigned by the server.
 */
function startHangingGet_(server, ourId) {
  if (isDisconnected_())
    return;
  // NOTE(review): assigned without 'var', so this creates/overwrites a global.
  // Possibly relied upon outside this function (e.g. to abort on disconnect) -
  // confirm before scoping it locally.
  hangingGetRequest = new XMLHttpRequest();
  hangingGetRequest.onreadystatechange = function() {
    hangingGetCallback_(hangingGetRequest, server, ourId);
  };
  hangingGetRequest.ontimeout = function() {
    hangingGetTimeoutCallback_(hangingGetRequest, server, ourId);
  };
  // NOTE(review): 'callUrl' is also an implicit global; likely safe to declare
  // with 'var', but verify no external readers first.
  callUrl = server + '/wait?peer_id=' + ourId;
  print_('Sending ' + callUrl);
  hangingGetRequest.open('GET', callUrl, true);
  hangingGetRequest.send();
}
/**
 * Handles a hanging-GET response: messages addressed to us are server
 * notifications, anything else is a relayed peer message. Always re-issues
 * the hanging GET afterwards.
 * @private
 */
function hangingGetCallback_(hangingGetRequest, server, ourId) {
  var requestDone = hangingGetRequest.readyState == 4;
  if (!requestDone || hangingGetRequest.status == 0) {
    // Code 0 is not possible if the server actually responded. Ignore.
    return;
  }
  if (hangingGetRequest.status != 200) {
    error_('Error ' + hangingGetRequest.status + ' from server: ' +
           hangingGetRequest.statusText);
  }
  // The Pragma header carries the id of the peer the message targets.
  var targetId = readResponseHeader_(hangingGetRequest, 'Pragma');
  if (targetId == ourId) {
    handleServerNotification_(hangingGetRequest.responseText);
  } else {
    handlePeerMessage_(targetId, hangingGetRequest.responseText);
  }
  hangingGetRequest.abort();
  restartHangingGet_(server, ourId);
}
/**
 * Timeout handler for the hanging GET: aborts the stale request and issues a
 * fresh one for the same server/peer pair.
 * @private
 */
function hangingGetTimeoutCallback_(hangingGetRequest, server, ourId) {
  print_('Hanging GET times out, re-issuing...');
  hangingGetRequest.abort();
  restartHangingGet_(server, ourId);
}
/**
 * Handles a notification from peerconnection_server. The message is
 * comma-separated; field 2 equal to 1 signals a newly connected peer, whose
 * id (field 1) becomes our remote peer.
 * @private
 * @param {string} message Raw notification body.
 */
function handleServerNotification_(message) {
  var parsed = message.split(',');
  // Radix 10 on both parses: never rely on parseInt's base guessing.
  if (parseInt(parsed[2], 10) == 1) {
    // Peer connected - this must be our remote peer, and it must mean we
    // connected before them (except if we happened to connect to the server
    // at precisely the same moment).
    print_('Found remote peer with name ' + parsed[0] + ', id ' + parsed[1] +
           ' when connecting.');
    global.remotePeerId = parseInt(parsed[1], 10);
  }
}
/**
 * Closes the active call, if any, and clears global.peerConnection.
 * @private
 */
function closeCall_() {
  if (global.peerConnection == null) {
    // Nothing to close; return instead of dereferencing null below (the
    // original fell through and would have thrown on .close()).
    debug_('Closing call, but no call active.');
    return;
  }
  global.peerConnection.close();
  global.peerConnection = null;
}
/**
 * Handles a message relayed from a peer: 'BYE' closes the call, an incoming
 * message while we have no connection (and accept calls) answers
 * automatically, anything else goes to the regular signaling handler.
 * @private
 * @param {number} peerId Id of the sending peer.
 * @param {string} message Raw message body.
 */
function handlePeerMessage_(peerId, message) {
  print_('Received message from peer ' + peerId + ': ' + message);
  if (peerId != global.remotePeerId) {
    // Fixed: the message previously lacked the closing parenthesis.
    error_('Received notification from unknown peer ' + peerId +
           ' (only know about ' + global.remotePeerId + ').');
  }
  if (message.search('BYE') == 0) {
    print_('Received BYE from peer: closing call');
    closeCall_();
    return;
  }
  if (global.peerConnection == null && global.acceptsIncomingCalls) {
    // The other side is calling us.
    print_('We are being called: answer...');
    global.peerConnection = createPeerConnection(STUN_SERVER);
    // NOTE(review): this tests only that the element exists, not that the
    // checkbox is checked - confirm whether '.checked' was intended.
    if ($('auto-add-stream-oncall') &&
        obtainGetUserMediaResult_() == 'ok-got-stream') {
      print_('We have a local stream, so hook it up automatically.');
      addLocalStreamToPeerConnection(global.peerConnection);
    }
    answerCall(global.peerConnection, message);
    return;
  }
  handleMessage(global.peerConnection, message);
}
/**
 * Schedules a fresh hanging GET on the next event-loop turn so the current
 * callback can unwind first.
 * @private
 */
function restartHangingGet_(server, ourId) {
  var reissue = function() {
    startHangingGet_(server, ourId);
  };
  window.setTimeout(reissue, 0);
}
/**
 * Reads a response header and parses it as a decimal integer.
 * @private
 * @param {XMLHttpRequest} request The completed request.
 * @param {string} key Name of the header to read.
 * @return {number} The header's value as an integer.
 */
function readResponseHeader_(request, key) {
  var value = request.getResponseHeader(key);
  if (value == null || value.length == 0) {
    error_('Received empty value ' + value +
           ' for response header key ' + key + '.');
  }
  // Radix 10: never rely on parseInt's legacy base guessing.
  return parseInt(value, 10);
}
/**
* Copyright (c) 2012 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
/* Base typography shared by the manual test pages. */
body, td {
  font-family: Helvetica Narrow, sans-serif;
  font-size: 11px;
}
/* Tight heading margins keep the dense test UI compact. */
h1, h2, h3 {
  margin: 6px 0px 3px 0px;
}
a {
  text-decoration: none;
}
pre {
  background-color: #eee;
  margin-right: 20px;
}
a:hover {
  text-decoration: underline;
}
textarea {
  font-size: 11px;
  font-family: monospace, Courier;
  border: 1px solid #ccc;
}
button {
  font-size: 11px;
  margin-left: 0px;
}
input[type=text] {
  border: 1px solid #ccc;
}
/* Presumably nudges checkboxes to align with label text - verify visually. */
input[type=checkbox] {
  position: relative;
  top: 2px;
}
/* Grey placeholder so empty video elements stay visible. */
video {
  background: #eee;
  border:1px solid #aaa;
}
.drop-down {
  width: 10%;
}
/* Two-column layout: .left floats, .right fills the remaining width. */
.left {
  float: left;
  width: 50%;
}
.right {
  width: 49%;
  margin-left: auto;
  position: relative;
}
.inline-contents {
  display: inline;
}
/* Green/red accent buttons (white text on solid color). */
.button-green {
  background: #064;
  color: #FFF;
}
.button-red {
  background: #B22;
  color: #FFF;
}
.width-100 {
  width: 100%;
}
#wrapper {
  margin: 0px auto;
  padding: 5px;
}
/* Fixed 50px header; #container scrolls below it. */
#header {
  position: fixed;
  overflow: auto;
  width: 100%;
  height: 50px;
}
#container {
  position: fixed;
  overflow: auto;
  width: 100%;
  height: 99%;
  margin-top: 50px;
}
#footer {
  overflow: auto;
  width: 100%;
  height: 50%;
}
#getusermedia-constraints {
  width: 40%;
}
/* Log areas: scroll and wrap long lines (e.g. SDP dumps). */
#debug {
  overflow: auto;
  word-wrap: break-word;
}
#messages {
  overflow: auto;
  word-wrap: break-word;
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment