Commit a3a4cd96 authored by Patrik Höglund, committed by Commit Bot

Reland: Replacing the last addExpectedEvent with promises.

This concludes the refactoring. The tests now have a MUCH clearer
control flow, and test success is no longer handled behind the scenes.
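For context, a sketch of the mechanism being removed, assembled from the
deleted helpers shown in webrtc_test_utilities.js below (this is an
illustration, not code added by this CL): success used to fire
implicitly once an internal event counter reached its target.

    // Old pattern: register the success handler up front...
    setAllEventsOccuredHandler(reportTestSuccess);
    // ...declare that one more event is expected...
    addExpectedEvent();
    // ...and report it from a callback; reportTestSuccess runs behind
    // the scenes once every expected event has occurred.
    firstDataChannel.onclose = function() {
      eventOccured();
    }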

I found that it's better to keep promise chains unbroken and as long as
possible, since we can then append a single .catch(failTest) at the end.
This isn't strictly necessary: if an exception or rejection breaks the
chain, reportTestSuccess is never called and the test times out anyway.
But it's much nicer if the test fails right away on exceptions rather
than timing out after 45 seconds.
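
A minimal sketch of the pattern, written in the style of the test pages
in this CL (the helpers used are ones that appear in the diff below; the
function name exampleCallTest is made up for illustration):

    function exampleCallTest(constraints) {
      createConnections(null);
      navigator.mediaDevices.getUserMedia(constraints)
          .then(addStreamToBothConnectionsAndNegotiate)
          // Keep chaining rather than starting a detached chain here...
          .then(() => detectVideoPlaying('remote-view-1'))
          .then(reportTestSuccess)
          // ...so a single catch at the end fails the test immediately
          // on any rejection or exception anywhere in the chain.
          .catch(failTest);
    }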

Bug: chromium:777857
Change-Id: I46c110782488bb20cdf7101690ca6966601506f2
Reviewed-on: https://chromium-review.googlesource.com/793153
Commit-Queue: Patrik Höglund <phoglund@chromium.org>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Cr-Commit-Position: refs/heads/master@{#519695}
parent 5642e6d4
......@@ -435,16 +435,10 @@ IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
expected_result);
}
// Test fails under MSan, http://crbug.com/445745
#if defined(MEMORY_SANITIZER)
#define MAYBE_TwoGetUserMediaAndVerifyFrameRate \
DISABLED_TwoGetUserMediaAndVerifyFrameRate
#else
#define MAYBE_TwoGetUserMediaAndVerifyFrameRate \
TwoGetUserMediaAndVerifyFrameRate
#endif
// Test fails under MSan, http://crbug.com/445745.
// Flaky everywhere: https://crbug.com/789121.
IN_PROC_BROWSER_TEST_F(WebRtcGetUserMediaBrowserTest,
MAYBE_TwoGetUserMediaAndVerifyFrameRate) {
DISABLED_TwoGetUserMediaAndVerifyFrameRate) {
ASSERT_TRUE(embedded_test_server()->Start());
GURL url(embedded_test_server()->GetURL("/media/getusermedia.html"));
......
......@@ -17,10 +17,6 @@
return document.getElementById(id);
};
setAllEventsOccuredHandler(function() {
reportTestSuccess();
});
// testVideoToImageBitmap and the tests below are layout tests that we
// run here because they require --use-fake-device-for-media-capture.
function getDepthStreamAndCallCreateImageBitmap() {
......
This diff is collapsed.
......@@ -20,28 +20,26 @@
var gRemoteStreams = {};
setAllEventsOccuredHandler(reportTestSuccess);
// The second set of constraints should request audio (e.g. audio:true) since
// we expect audio to be playing after the second renegotiation.
function callAndRenegotiateToAudio(constraints, renegotiationConstraints) {
createConnections(null);
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
waitForConnectionToStabilize(gFirstConnection).then(() => {
gFirstConnection.removeStream(gLocalStream);
gSecondConnection.removeStream(gLocalStream);
navigator.mediaDevices.getUserMedia(renegotiationConstraints)
.then(addStreamToTheFirstConnectionAndNegotiate)
.catch(printGetUserMediaError);
waitForConnectionToStabilize(gFirstConnection)
.then(() => { return ensureAudioPlaying(gSecondConnection); })
.then(reportTestSuccess);
});
return navigator.mediaDevices.getUserMedia(renegotiationConstraints)
.then(addStreamToTheFirstConnectionAndNegotiate);
}).then(() => {
return waitForConnectionToStabilize(gFirstConnection)
}).then(() => {
return ensureAudioPlaying(gSecondConnection);
})
.then(reportTestSuccess)
.catch(failTest);
}
function setupCallAndPromiseAudioPlaying(constraints) {
......@@ -50,7 +48,7 @@
// Add the local stream to gFirstConnection to play one-way audio.
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToTheFirstConnectionAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
return waitForConnectionToStabilize(gFirstConnection)
.then(() => { return ensureAudioPlaying(gSecondConnection); });
......@@ -58,7 +56,8 @@
function callAndEnsureAudioIsPlaying(constraints) {
setupCallAndPromiseAudioPlaying(constraints)
.then(reportTestSuccess);
.then(reportTestSuccess)
.catch(failTest);
}
function callWithIsac16KAndEnsureAudioIsPlaying(constraints) {
......@@ -99,9 +98,10 @@
// Call is up, now mute the remote track and check we stop playing out
// audio (after a small delay, we don't expect it to happen instantly).
enableRemoteAudio(gSecondConnection, false);
ensureSilence(gSecondConnection)
return ensureSilence(gSecondConnection)
.then(reportTestSuccess);
});
})
.catch(failTest);
}
function callAndEnsureLocalAudioTrackMutingWorks() {
......@@ -109,9 +109,10 @@
// Call is up, now mute the local track of the sending side and ensure
// the receiving side stops receiving audio.
enableLocalAudio(gFirstConnection, false);
ensureSilence(gSecondConnection)
return ensureSilence(gSecondConnection)
.then(reportTestSuccess);
});
})
.catch(failTest);
}
function callAndEnsureAudioTrackUnmutingWorks() {
......@@ -127,15 +128,16 @@
setTimeout(function() {
ensureAudioPlaying(gSecondConnection)
.then(reportTestSuccess);
.then(reportTestSuccess)
.catch(failTest);
}, 1500);
});
}).catch(failTest);
}
function callAndEnsureLocalVideoMutingDoesntMuteAudio() {
setupCallAndPromiseAudioPlaying({audio: true, video: true}).then(() => {
enableLocalVideo(gFirstConnection, false);
ensureAudioPlaying(gSecondConnection)
return ensureAudioPlaying(gSecondConnection)
.then(reportTestSuccess);
});
}
......@@ -143,7 +145,7 @@
function callAndEnsureRemoteVideoMutingDoesntMuteAudio() {
setupCallAndPromiseAudioPlaying({audio: true, video: true}).then(() => {
enableRemoteVideo(gSecondConnection, false);
ensureAudioPlaying(gSecondConnection)
return ensureAudioPlaying(gSecondConnection)
.then(reportTestSuccess);
});
}
......
......@@ -30,8 +30,6 @@
'inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj'
};
setAllEventsOccuredHandler(reportTestSuccess);
// Test that we can setup call with legacy settings.
function callWithLegacySdp() {
setOfferSdpTransform(function(sdp) {
......@@ -40,12 +38,13 @@
createConnections({
'mandatory': {'RtpDataChannels': true, 'DtlsSrtpKeyAgreement': false}
});
setupDataChannel({reliable: false});
var hasExchanged = promiseDataChannelExchange({reliable: false});
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
hasExchanged,
detectVideoPlaying('remote-view-1'),
detectVideoPlaying('remote-view-2')
]).then(reportTestSuccess);
......@@ -54,25 +53,28 @@
// Test only a data channel.
function callWithDataOnly() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel({reliable: false});
promiseDataChannelExchange({reliable: false})
.then(reportTestSuccess);
negotiate();
}
function callWithSctpDataOnly() {
createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
setupSctpDataChannel({reliable: true});
promiseSctpDataChannelExchange({reliable: true})
.then(reportTestSuccess);
negotiate();
}
// Test call with audio, video and a data channel.
function callWithDataAndMedia() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel({reliable: false});
var hasExchanged = promiseDataChannelExchange({reliable: false});
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
hasExchanged,
detectVideoPlaying('remote-view-1'),
detectVideoPlaying('remote-view-2')
]).then(reportTestSuccess);
......@@ -80,12 +82,13 @@
function callWithSctpDataAndMedia() {
createConnections({optional: [{DtlsSrtpKeyAgreement: true}]});
setupSctpDataChannel({reliable: true});
var hasExchanged = promiseSctpDataChannelExchange({reliable: true});
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
hasExchanged,
detectVideoPlaying('remote-view-1'),
detectVideoPlaying('remote-view-2')
]).then(reportTestSuccess);
......@@ -94,21 +97,22 @@
// Test call with a data channel and later add audio and video.
function callWithDataAndLaterAddMedia() {
createConnections({optional:[{RtpDataChannels: true}]});
setupDataChannel({reliable: false});
var hasExchanged = promiseDataChannelExchange({reliable: false});
negotiate();
// Set an event handler for when the data channel has been closed.
setAllEventsOccuredHandler(function() {
hasExchanged.then(() => {
// When the video is flowing the test is done.
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
Promise.all([
return navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate);
}).then(() => {
return Promise.all([
detectVideoPlaying('remote-view-1'),
detectVideoPlaying('remote-view-2')
]).then(reportTestSuccess);
});
]);
})
.then(reportTestSuccess)
.catch(failTest);
}
// This function is used for setting up a test that:
......@@ -117,8 +121,12 @@
// 2. When data is received on |gSecondConnection| a message
// is sent to |gFirstConnection|.
// 3. When data is received on |gFirstConnection|, the data
// channel is closed. The test passes when the state transition completes.
function setupDataChannel(params) {
// channel is closed. This function returns a promise that resolves when
// that last channel is closed.
//
// Note: you need to negotiate after calling this function, or the exchange
// will not happen, and the promise will not resolve.
function promiseDataChannelExchange(params) {
var sendDataString = "send some text on a data channel."
firstDataChannel = gFirstConnection.createDataChannel(
"sendDataChannel", params);
......@@ -139,11 +147,12 @@
}
// When |firstDataChannel| transitions to the closed state, the test passes.
addExpectedEvent();
firstDataChannel.onclose = function() {
assertEquals('closed', firstDataChannel.readyState);
eventOccured();
}
var closedPromise = new Promise((resolve, reject) => {
firstDataChannel.onclose = function() {
assertEquals('closed', firstDataChannel.readyState);
resolve();
}
});
// Event handler for when |gSecondConnection| receives a new dataChannel.
gSecondConnection.ondatachannel = function (event) {
......@@ -158,6 +167,8 @@
secondDataChannel.send(sendDataString);
}
}
return closedPromise;
}
// SCTP data channel setup is slightly different than RTP based
......@@ -165,7 +176,9 @@
// after channel becomes open. So for that reason in SCTP,
// we are sending data from second channel, when ondatachannel event is
// received. So data flow happens 2 -> 1 -> 2.
function setupSctpDataChannel(params) {
// Note: you need to negotiate after calling this function, or the exchange
// will not happen, and the promise will not resolve.
function promiseSctpDataChannelExchange(params) {
var sendDataString = "send some text on a data channel."
firstDataChannel = gFirstConnection.createDataChannel(
"sendDataChannel", params);
......@@ -184,30 +197,31 @@
firstDataChannel.send(sendDataString);
}
// Event handler for when |gSecondConnection| receive a new dataChannel.
gSecondConnection.ondatachannel = function (event) {
// Make secondDataChannel global to make sure it's not gc'd.
secondDataChannel = event.channel;
secondDataChannel.onopen = function() {
secondDataChannel.send(sendDataString);
}
// When |secondDataChannel| receive a message, close the channel and
// initiate a new offer/answer exchange to complete the closure.
secondDataChannel.onmessage = function(event) {
assertEquals(event.data, sendDataString);
assertEquals('open', secondDataChannel.readyState);
secondDataChannel.close();
negotiate();
}
// When |secondDataChannel| transition to closed state, the test pass.
addExpectedEvent();
secondDataChannel.onclose = function() {
assertEquals('closed', secondDataChannel.readyState);
eventOccured();
return new Promise((resolve, reject) => {
// Event handler for when |gSecondConnection| receives a new dataChannel.
gSecondConnection.ondatachannel = function (event) {
// Make secondDataChannel global to make sure it's not gc'd.
secondDataChannel = event.channel;
secondDataChannel.onopen = function() {
secondDataChannel.send(sendDataString);
}
// When |secondDataChannel| receives a message, close the channel and
// initiate a new offer/answer exchange to complete the closure.
secondDataChannel.onmessage = function(event) {
assertEquals(event.data, sendDataString);
assertEquals('open', secondDataChannel.readyState);
secondDataChannel.close();
negotiate();
}
// When |secondDataChannel| transitions to the closed state, we're done.
secondDataChannel.onclose = function() {
assertEquals('closed', secondDataChannel.readyState);
resolve();
}
}
}
});
}
function addStreamToBothConnectionsAndNegotiate(localStream) {
......
......@@ -20,15 +20,13 @@
var gRemoteStreams = {};
setAllEventsOccuredHandler(reportTestSuccess);
// Test that we can setup a call with an audio and video track (must request
// video in this call since we expect video to be playing).
function call(constraints) {
createConnections(null);
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -40,7 +38,7 @@
createConnections(null);
navigator.webkitGetUserMedia({video: true, audio: true},
addStreamToBothConnectionsAndNegotiate,
printGetUserMediaError);
failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -69,7 +67,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
detectVideoPlaying('remote-view-1').then(() => {
assertEquals(gLocalStream.getVideoTracks().length, 1);
......@@ -77,7 +75,8 @@
detectBlackVideo('remote-view-1')
.then(reportTestSuccess);
});
})
.catch(failTest);
}
// Test that we can setup call with an audio and video track and check that
......@@ -88,7 +87,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
function hasExpectedResolution(elementName) {
// Returns a promise that video is playing and has the expected
......@@ -115,12 +114,12 @@
negotiate();
waitForConnectionToStabilize(gFirstConnection).then(() => {
return navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToTheFirstConnectionAndNegotiate)
.catch(printGetUserMediaError);
.then(addStreamToTheFirstConnectionAndNegotiate);
}).then(() => {
// Only the first connection is sending here.
return detectVideoPlaying('remote-view-2');
}).then(reportTestSuccess);
}).then(reportTestSuccess)
.catch(failTest);
}
// The second set of constraints should request video (e.g. video:true) since
......@@ -129,20 +128,20 @@
createConnections(null);
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
waitForConnectionToStabilize(gFirstConnection).then(() => {
gFirstConnection.removeStream(gLocalStream);
gSecondConnection.removeStream(gLocalStream);
}).then(() => {
return navigator.mediaDevices.getUserMedia(renegotiationConstraints)
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.then(addStreamToBothConnectionsAndNegotiate);
}).then(() => {
return Promise.all([
detectVideoPlaying('remote-view-1'),
detectVideoPlaying('remote-view-1')]);
}).then(reportTestSuccess);
}).then(reportTestSuccess)
.catch(failTest);
}
// First makes a call between pc1 and pc2 where a stream is sent from pc1 to
......@@ -154,7 +153,7 @@
navigator.mediaDevices.getUserMedia(constraints)
.then(addStreamToTheFirstConnectionAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
console.log('Initial setup done. Waiting.');
......@@ -170,7 +169,8 @@
// Wait for video to be forwarded back to connection 1.
detectVideoPlaying('remote-view-1')
.then(reportTestSuccess);
});
})
.catch(failTest);
}
// First makes a call between pc1 and pc2, and then construct a new media
......@@ -180,7 +180,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
detectVideoPlaying('remote-view-2').then(() => {
// Construct a new media stream with remote tracks.
......@@ -195,7 +195,8 @@
videoElement.src = URL.createObjectURL(newStream);
detectVideoPlaying('remote-view-2')
.then(reportTestSuccess);
});
})
.catch(failTest);
}
// Test that we can setup call with an audio and video track and
......@@ -207,7 +208,7 @@
gTestWithoutMsid = true;
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -229,7 +230,7 @@
});
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
}
// Test that we can't setup a call if one peer does not support encryption
......@@ -245,7 +246,7 @@
});
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
}
// Test that we can negotiate a call with an SDP offer that includes a
......@@ -255,7 +256,7 @@
setOfferSdpTransform(addBandwithControl);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -268,7 +269,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
// Do the DTMF test after we have received video.
detectVideoPlaying('remote-view-2').then(() => {
......@@ -284,7 +285,8 @@
}
};
window.sender.insertDTMF(tones);
});
})
.catch(failTest);
}
function testCreateOfferOptions() {
......@@ -318,7 +320,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
detectVideoPlaying('remote-view-2').then(() => {
// Disable the receiver's remote media stream. Video should stop.
......@@ -332,7 +334,8 @@
detectVideoPlaying('remote-view-2')
.then(reportTestSuccess);
})
});
})
.catch(failTest);
}
// Test call with a new Video MediaStream that has been created based on a
......@@ -341,7 +344,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(createNewVideoStreamAndAddToBothConnections)
.catch(printGetUserMediaError);
.catch(failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -358,7 +361,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(createNewVideoStreamAndAddToBothConnections)
.then(printGetUserMediaError);
.catch(failTest);
Promise.all([
detectVideoPlaying('remote-view-1'),
......@@ -416,7 +419,8 @@
gSecondConnection.addStream(localStream);
negotiate();
});
})
.catch(failTest);
}
// Loads this page inside itself using an iframe, and ensures we can make a
......@@ -437,11 +441,6 @@
function onIframeLoaded() {
var iframe = window.document.querySelector('iframe');
// Propagate test success out of the iframe.
iframe.contentWindow.setAllEventsOccuredHandler(
window.parent.reportTestSuccess);
func(iframe);
}
}
......@@ -466,7 +465,6 @@
}
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.catch(printGetUserMediaError)
.then((localStream) => {
displayAndRemember(localStream);
......@@ -497,7 +495,8 @@
verifyHasOneAudioAndVideoTrack(
gSecondConnection.getRemoteStreams()[1]);
})
.then(reportTestSuccess);
.then(reportTestSuccess)
.catch(failTest);
}
function createConnections(constraints) {
......@@ -670,7 +669,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
detectVideoPlaying('remote-view-1').then(() => {
var track = gFirstConnection.getRemoteStreams()[0].getVideoTracks()[0];
......@@ -693,7 +692,7 @@
createConnections(null);
navigator.mediaDevices.getUserMedia({audio: true, video: true})
.then(addStreamToBothConnectionsAndNegotiate)
.catch(printGetUserMediaError);
.catch(failTest);
detectVideoPlaying('remote-view-1').then(() => {
var settings1 = gFirstConnection.getRemoteStreams()[0].getVideoTracks()[0]
......
......@@ -9,23 +9,8 @@ const VIDEO_TAG_HEIGHT = 240;
// Fake video capture background green is of value 135.
const COLOR_BACKGROUND_GREEN = 135;
// Number of test events to occur before the test pass. When the test pass,
// the function gAllEventsOccured is called.
var gNumberOfExpectedEvents = 0;
// Number of events that currently have occurred.
var gNumberOfEvents = 0;
var gAllEventsOccured = function () {};
var gPendingTimeout;
// Use this function to set a function that will be called once all expected
// events has occurred.
function setAllEventsOccuredHandler(handler) {
gAllEventsOccured = handler;
}
// Tells the C++ code we succeeded, which will generally exit the test.
function reportTestSuccess() {
console.log('Test Success');
......@@ -39,7 +24,11 @@ function sendValueToTest(value) {
// Immediately fails the test on the C++ side.
function failTest(reason) {
var error = new Error(reason);
if (reason instanceof Error) {
var error = reason;
} else {
var error = new Error(reason);
}
window.domAutomationController.send(error.stack);
}
......@@ -57,16 +46,6 @@ function cancelTestTimeout() {
gPendingTimeout = null;
}
// Called if getUserMedia fails.
function printGetUserMediaError(error) {
var message = 'getUserMedia request unexpectedly failed:';
if (error.constraintName)
message += ' could not satisfy constraint ' + error.constraintName;
else
message += ' devices not working/user denied access.';
failTest(message);
}
function detectVideoPlaying(videoElementName) {
return detectVideo(videoElementName, isVideoPlaying);
}
......@@ -132,44 +111,45 @@ function detectVideo(videoElementName, predicate) {
});
}
// Calculates the current frame rate and compares to |expected_frame_rate|
// |callback| is triggered with value |true| if the calculated frame rate
// is +-1 the expected or |false| if five calculations fail to match
// |expected_frame_rate|. Calls back with OK if the check passed, otherwise
// an error message.
function validateFrameRate(videoElementName, expected_frame_rate, callback) {
var videoElement = $(videoElementName);
var startTime = new Date().getTime();
var decodedFrames = videoElement.webkitDecodedFrameCount;
var attempts = 0;
if (videoElement.readyState <= HTMLMediaElement.HAVE_CURRENT_DATA ||
videoElement.paused || videoElement.ended) {
failTest("getFrameRate - " + videoElementName + " is not plaing.");
return;
}
var waitVideo = setInterval(function() {
attempts++;
currentTime = new Date().getTime();
deltaTime = (currentTime - startTime) / 1000;
startTime = currentTime;
// Calculate decoded frames per sec.
var fps =
(videoElement.webkitDecodedFrameCount - decodedFrames) / deltaTime;
decodedFrames = videoElement.webkitDecodedFrameCount;
// Calculates the current frame rate and compares it to |expectedFrameRate|.
// The returned promise resolves with |true| if the calculated frame rate
// is within +-1 of the expected rate, or rejects with an error message if
// five calculations fail to match |expectedFrameRate|.
function validateFrameRate(videoElementName, expectedFrameRate) {
return new Promise((resolve, reject) => {
var videoElement = $(videoElementName);
var startTime = new Date().getTime();
var decodedFrames = videoElement.webkitDecodedFrameCount;
var attempts = 0;
if (videoElement.readyState <= HTMLMediaElement.HAVE_CURRENT_DATA ||
videoElement.paused || videoElement.ended) {
reject("getFrameRate - " + videoElementName + " is not plaing.");
return;
}
console.log('FrameRate in ' + videoElementName + ' is ' + fps);
if (fps < expected_frame_rate + 1 && fps > expected_frame_rate - 1) {
clearInterval(waitVideo);
callback('OK');
} else if (attempts == 5) {
clearInterval(waitVideo);
callback('Expected frame rate ' + expected_frame_rate + ' for ' +
var waitVideo = setInterval(function() {
attempts++;
currentTime = new Date().getTime();
deltaTime = (currentTime - startTime) / 1000;
startTime = currentTime;
// Calculate decoded frames per sec.
var fps =
(videoElement.webkitDecodedFrameCount - decodedFrames) / deltaTime;
decodedFrames = videoElement.webkitDecodedFrameCount;
console.log('FrameRate in ' + videoElementName + ' is ' + fps);
if (fps < expectedFrameRate + 1 && fps > expectedFrameRate - 1) {
clearInterval(waitVideo);
resolve(true);
} else if (attempts == 5) {
clearInterval(waitVideo);
reject('Expected frame rate ' + expectedFrameRate + ' for ' +
'element ' + videoElementName + ', but got ' + fps);
}
}, 1000);
}
}, 1000);
});
}
function waitForConnectionToStabilize(peerConnection) {
......@@ -183,22 +163,6 @@ function waitForConnectionToStabilize(peerConnection) {
});
}
// Adds an expected event. You may call this function many times to add more
// expected events. Each expected event must later be matched by a call to
// eventOccurred. When enough events have occurred, the "all events occurred
// handler" will be called.
function addExpectedEvent() {
++gNumberOfExpectedEvents;
}
// See addExpectedEvent.
function eventOccured() {
++gNumberOfEvents;
if (gNumberOfEvents == gNumberOfExpectedEvents) {
gAllEventsOccured();
}
}
// This very basic video verification algorithm will be satisfied if any
// pixels are changed.
function isVideoPlaying(pixels, previousPixels) {
......