Commit bf175808 authored by Thomas Guilbert's avatar Thomas Guilbert Committed by Commit Bot

[video-raf] Add parameter verification tests

This CL adds/modifies three tests:

1. Updates the chained video.rAF callback test to also verify the
required VideoFrameMetadata fields (e.g. dimensions stay constant, time
and presented frames are increasing).

2. Adds a test that makes sure that video.rAF callbacks are
executed before window.rAF callbacks, and in the same turn of the event
loop (as measured by having identical 'time' callback parameters).

3. Adds a test that verifies that all of the WebRTC optional parameters
are present when we expect them to be.

Bug: 1012063
Change-Id: I72461dd42f6da1a03faaee61355e7c191fe4c116
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2130643
Commit-Queue: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: default avatarGuido Urdaneta <guidou@chromium.org>
Reviewed-by: default avatarDan Sanders <sandersd@chromium.org>
Auto-Submit: Thomas Guilbert <tguilbert@chromium.org>
Cr-Commit-Position: refs/heads/master@{#759671}
parent b8085fb4
......@@ -595,6 +595,7 @@ void WebMediaPlayerMSCompositor::SetCurrentFrame(
base::TimeTicks now = base::TimeTicks::Now();
last_presentation_time_ = now;
last_expected_display_time_ = expected_display_time.value_or(now);
last_preferred_render_interval_ = GetPreferredRenderInterval();
++presented_frames_;
OnNewFramePresentedCB presented_frame_cb;
......@@ -747,7 +748,7 @@ WebMediaPlayerMSCompositor::GetLastPresentedFrameMetadata() {
frame_metadata->expected_display_time = last_expected_display_time_;
frame_metadata->presented_frames = static_cast<uint32_t>(presented_frames_);
frame_metadata->average_frame_duration = GetPreferredRenderInterval();
frame_metadata->average_frame_duration = last_preferred_render_interval_;
frame_metadata->rendering_interval = last_render_length_;
}
......
......@@ -239,6 +239,12 @@ class MODULES_EXPORT WebMediaPlayerMSCompositor
base::TimeTicks last_expected_display_time_ GUARDED_BY(current_frame_lock_);
size_t presented_frames_ GUARDED_BY(current_frame_lock_) = 0u;
// The value of GetPreferredRenderInterval() the last time |current_frame_|
// was updated. Used by GetLastPresentedFrameMetadata(), to prevent calling
// GetPreferredRenderInterval() from the main thread.
base::TimeDelta last_preferred_render_interval_
GUARDED_BY(current_frame_lock_);
bool stopped_;
bool render_started_;
......
......@@ -36,26 +36,42 @@ async_test(function(t) {
async_test(function(t) {
  // Chains video.requestVideoFrameCallback() calls and verifies the required
  // VideoFrameMetadata fields across consecutive frames. The original text
  // interleaved the pre-change lines of the diff with their replacements
  // (duplicate counters, two callback definitions, two test titles); this is
  // the reconstructed post-change test.
  let video = document.createElement('video');
  let maxNumberOfCalls = 10;
  let currentCallNumber = 0;
  let lastMetadata;

  // Compares two consecutive metadata dictionaries.
  function verifyMetadata(last, current) {
    assert_greater_than(current.presentedFrames, last.presentedFrames, "presentedFrames should be monotonically increasing");
    assert_greater_than(current.presentationTime, last.presentationTime, "presentationTime should be monotonically increasing");
    assert_greater_than(current.expectedDisplayTime, last.expectedDisplayTime, "expectedDisplayTime should be monotonically increasing");

    // We aren't seeking through the file, so this should be increasing from frame to frame.
    assert_greater_than(current.mediaTime, last.mediaTime, "mediaTime should be increasing");

    // The test video's size doesn't change.
    assert_equals(current.width, last.width, "width should remain constant");
    assert_equals(current.height, last.height, "height should remain constant");
  }

  function repeatingCallback(time, metadata) {
    // Skip the first call to verifyMetadata: there is no previous frame yet.
    if (currentCallNumber)
      verifyMetadata(lastMetadata, metadata)
    lastMetadata = metadata;

    if (++currentCallNumber > maxNumberOfCalls) {
      t.done()
    } else {
      video.requestVideoFrameCallback(t.step_func(repeatingCallback));
    }
  }

  video.requestVideoFrameCallback(t.step_func(repeatingCallback));

  video.src = getVideoURI('/media/movie_5');
  video.play();
}, 'Test chaining calls to video.rVFC, and verify the required parameters.');
</script>
</html>
<!doctype html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>WebRTC video.requestVideoFrameCallback() test</title>
<script src="/webrtc/RTCPeerConnection-helper.js"></script>
</head>
<body>
<div id="log"></div>
<div>
<video id="local-view" muted autoplay="autoplay"></video>
    <video id="remote-view" muted autoplay="autoplay"></video>
</div>
<!-- These files are in place when executing on W3C. -->
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="text/javascript">
// Shared test handle and peer-connection state for the callbacks below.
var test = async_test('Test video.requestVideoFrameCallback() parameters for WebRTC applications.');
//
// This test is based on /webrtc/simplecall.https.html, but it calls to
// video.requestVideoFrameCallback() before ending, to verify WebRTC required
// and optional parameters.
//
var gFirstConnection = null;   // Caller-side RTCPeerConnection.
var gSecondConnection = null;  // Callee-side RTCPeerConnection.
var gCallbackCounter = 0;      // Number of rVFC callbacks received so far.
var verify_params = (now, metadata) => {
  // rVFC callback on the remote <video>: checks the required
  // VideoFrameMetadata fields plus the WebRTC-only ones.
  ++gCallbackCounter;

  // The callback's 'now' timestamp must be a positive time value.
  assert_greater_than(now, 0);

  // Required fields: all strictly positive by the time a frame is presented.
  for (const field of ['presentationTime', 'expectedDisplayTime',
                       'presentedFrames', 'width', 'height']) {
    assert_greater_than(metadata[field], 0);
  }
  assert_true("mediaTime" in metadata, "mediaTime should be present");

  // Verify WebRTC only fields.
  assert_true("rtpTimestamp" in metadata, "rtpTimestamp should be present");
  assert_true("receiveTime" in metadata, "receiveTime should be present");

  // captureTime is not available until roundtrip time estimation is done,
  // so keep re-registering callbacks for a while before insisting on it.
  if (!("captureTime" in metadata) && gCallbackCounter <= 60) {
    document.getElementById('remote-view').requestVideoFrameCallback(test.step_func(verify_params));
    return;
  }
  assert_true("captureTime" in metadata, "captureTime should be present");
  test.done();
}
// If the remote video gets video data that implies the negotiation
// as well as the ICE and DTLS connection are up.
const remoteView = document.getElementById('remote-view');
remoteView.addEventListener('loadedmetadata', () => {
  remoteView.requestVideoFrameCallback(test.step_func(verify_params));
});
function getNoiseStreamOkCallback(localStream) {
  // Wires up both peers once the noise stream is available, then kicks off
  // the offer/answer handshake.
  gFirstConnection = new RTCPeerConnection(null);
  gSecondConnection = new RTCPeerConnection(null);
  gFirstConnection.onicecandidate = onIceCandidateToFirst;
  gSecondConnection.onicecandidate = onIceCandidateToSecond;
  gSecondConnection.ontrack = onRemoteTrack;

  // Bidirectional streams are needed in order for captureTime to be
  // populated. Use the same source in both directions.
  for (const track of localStream.getTracks()) {
    gFirstConnection.addTrack(track, localStream);
    gSecondConnection.addTrack(track, localStream);
  }

  gFirstConnection.createOffer().then(onOfferCreated, failed('createOffer'));

  document.getElementById('local-view').srcObject = localStream;
};
var onOfferCreated = test.step_func((offer) => {
  // Apply the offer locally, then hand its SDP to the "remote" peer.
  gFirstConnection.setLocalDescription(offer);
  // This would normally go across the application's signaling solution.
  // In our case, the "signaling" is to call this function.
  receiveCall(offer.sdp);
});
function receiveCall(offerSdp) {
  // Callee side: accept the caller's offer SDP and produce an answer.
  const remoteOffer =
      new RTCSessionDescription({type: 'offer', sdp: offerSdp});
  gSecondConnection.setRemoteDescription(remoteOffer);
  gSecondConnection.createAnswer().then(onAnswerCreated,
                                        failed('createAnswer'));
};
var onAnswerCreated = test.step_func((answer) => {
  // Apply the answer on the callee, then ship its SDP back to the caller.
  gSecondConnection.setLocalDescription(answer);
  // Similarly, this would go over the application's signaling solution.
  handleAnswer(answer.sdp);
});
function handleAnswer(answerSdp) {
  // Caller side: complete the handshake with the callee's answer SDP.
  const remoteAnswer =
      new RTCSessionDescription({type: 'answer', sdp: answerSdp});
  gFirstConnection.setRemoteDescription(remoteAnswer);
};
var onIceCandidateToFirst = test.step_func((event) => {
  // A null event.candidate signals end-of-candidates; nothing to forward.
  if (!event.candidate)
    return;
  gSecondConnection.addIceCandidate(event.candidate);
});
var onIceCandidateToSecond = test.step_func((event) => {
  // Forward the callee's candidates to the caller, skipping the null
  // end-of-candidates marker.
  if (!event.candidate)
    return;
  gFirstConnection.addIceCandidate(event.candidate);
});
var onRemoteTrack = test.step_func((event) => {
  // Attach the first incoming stream to the remote <video>, only once.
  const remoteVideo = document.getElementById('remote-view');
  if (remoteVideo.srcObject)
    return;
  remoteVideo.srcObject = event.streams[0];
});
// Returns a suitable error callback.
function failed(function_name) {
  // The returned callback fails the test, naming the WebRTC call that
  // unexpectedly reported an error.
  return test.unreached_func('WebRTC called error callback for ' + function_name);
}
// This function starts the test: grab a synthetic audio/video stream and
// hand it to getNoiseStreamOkCallback, which sets up the call.
test.step(() => {
  getNoiseStream({ video: true, audio: true })
      .then(test.step_func(getNoiseStreamOkCallback), failed('getNoiseStream'));
});
</script>
</body>
</html>
\ No newline at end of file
......@@ -29,6 +29,24 @@ async_test(function(t) {
}, 'Test we can register a video.rVFC callback.');
async_test(function(t) {
  let video = document.createElement('video');

  // From inside a video.rVFC callback, queue a window.rAF callback and check
  // that both run in the same turn of the event loop — i.e. they are handed
  // the exact same 'time' value.
  video.requestVideoFrameCallback(t.step_func(videoTime => {
    window.requestAnimationFrame(t.step_func_done(windowTime => {
      assert_equals(videoTime, windowTime);
    }));
  }));

  video.src = testVideo.url;
  video.play();
}, 'Test video.rVFC callbacks run before window.rAF callbacks.');
async_test(function(t) {
let video = document.createElement('video');
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment