Commit 97203bbb authored by Guido Urdaneta; committed by Commit Bot

[RTCInsertableStreams] Add web tests for audio insertable streams

Drive-by: update video tests to return correct promise and improve
style.

Bug: 1052765
Change-Id: Id1c31b577f0431b2073f0ae486926283aca41759
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2132200
Commit-Queue: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: Marina Ciocea <marinaciocea@chromium.org>
Cr-Commit-Position: refs/heads/master@{#755919}
parent 607916e6
<!DOCTYPE html>
<meta charset="utf-8">
<!-- Based on similar tests in html/infrastructure/safe-passing-of-structured-data/shared-array-buffers/ -->
<title>RTCEncodedAudioFrame cannot cross agent clusters, service worker edition</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../webrtc/RTCPeerConnection-helper.js"></script>
<script src="../service-workers/service-worker/resources/test-helpers.sub.js"></script>
<script>
"use strict";
// Verifies that an RTCEncodedAudioFrame read from an encoded-audio insertable
// stream cannot be structured-cloned into a service worker in a different
// agent cluster: the worker must observe `onmessageerror` rather than
// `onmessage` when the frame is posted to it.
promise_test(async t => {
  const caller = new RTCPeerConnection({forceEncodedAudioInsertableStreams:true});
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());
  const stream = await navigator.mediaDevices.getUserMedia({audio:true});
  const track = stream.getTracks()[0];
  t.add_cleanup(() => track.stop());
  const sender = caller.addTrack(track);
  const streams = sender.createEncodedAudioStreams();
  // Only the readable side is needed; the test merely captures one encoded
  // frame to post to the worker. (A writer was previously acquired here but
  // never used.)
  const reader = streams.readableStream.getReader();
  exchangeIceCandidates(caller, callee);
  await doSignalingHandshake(caller, callee);
  // Grab one encoded audio frame to attempt to post to the worker.
  const result = await reader.read();
  const scope = "resources/blank.html";
  const reg = await service_worker_unregister_and_register(
      t, "resources/serviceworker-failure.js", scope);
  t.add_cleanup(() => service_worker_unregister(t, scope));
  await wait_for_state(t, reg.installing, "activated");
  const iframe = await with_iframe(scope);
  t.add_cleanup(() => iframe.remove());
  const sw = iframe.contentWindow.navigator.serviceWorker;
  let state = "start in window";
  return new Promise(resolve => {
    sw.onmessage = t.step_func(e => {
      if (e.data === "start in worker") {
        assert_equals(state, "start in window");
        // Posting the encoded frame must fail to deserialize in the worker,
        // which should report back via the onmessageerror confirmation below.
        sw.controller.postMessage(result.value);
        state = "we are expecting confirmation of an onmessageerror in the worker";
      } else if (e.data === "onmessageerror was received in worker") {
        assert_equals(state, "we are expecting confirmation of an onmessageerror in the worker");
        resolve();
      } else {
        assert_unreached("Got an unexpected message from the service worker: " + e.data);
      }
    });
    // Kick off the handshake with the worker.
    sw.controller.postMessage(state);
  });
});
</script>
......@@ -29,17 +29,13 @@ promise_test(async t => {
const result = await senderReader.read();
const scope = "resources/blank.html";
return service_worker_unregister_and_register(t, "resources/serviceworker-failure.js", scope)
.then(reg => {
const reg = await service_worker_unregister_and_register(t, "resources/serviceworker-failure.js", scope)
t.add_cleanup(() => service_worker_unregister(t, scope));
return wait_for_state(t, reg.installing, "activated");
})
.then(() => with_iframe(scope))
.then(iframe => {
await wait_for_state(t, reg.installing, "activated");
const iframe = await with_iframe(scope);
t.add_cleanup(() => iframe.remove());
const sw = iframe.contentWindow.navigator.serviceWorker;
let state = "start in window";
return new Promise(resolve => {
sw.onmessage = t.step_func(e => {
if (e.data === "start in worker") {
......@@ -56,6 +52,5 @@ promise_test(async t => {
sw.controller.postMessage(state);
});
});
});
</script>
......@@ -8,14 +8,14 @@ self.onmessage = e => {
if (e.data === "start in window") {
assert_equals(state, "start in worker");
e.source.postMessage(state);
state = "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame";
state = "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame or RTCEncodedAudioFrame";
} else {
e.source.postMessage(`worker onmessage was reached when in state "${state}" and data ${e.data}`);
}
};
self.onmessageerror = e => {
if (state === "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame") {
if (state === "we are expecting a messageerror due to the window sending us an RTCEncodedVideoFrame or RTCEncodedAudioFrame") {
assert_equals(e.constructor.name, "ExtendableMessageEvent", "type");
assert_equals(e.data, null, "data");
assert_equals(e.origin, self.origin, "origin");
......
<!DOCTYPE html>
<html>
<head>
<title>RTCPeerConnection Encoded Audio Insertable Streams</title>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../external/wpt/webrtc/RTCPeerConnection-helper.js">
</script></head>
<body>
<script>
/**
 * Compares two ArrayBuffers byte by byte.
 * @param {ArrayBuffer} buffer1
 * @param {ArrayBuffer} buffer2
 * @returns {boolean} true iff both buffers have the same length and contents.
 */
function areArrayBuffersEqual(buffer1, buffer2)
{
  if (buffer1.byteLength !== buffer2.byteLength)
    return false;
  const array1 = new Int8Array(buffer1);
  const array2 = new Int8Array(buffer2);
  for (let i = 0; i < array1.length; ++i) {
    if (array1[i] !== array2[i])
      return false;
  }
  return true;
}
// Two recorded frame infos are considered equal when their payload buffers
// match byte-for-byte.
function areFrameInfosEqual(frame1, frame2) {
  const {data: payload1} = frame1;
  const {data: payload2} = frame2;
  return areArrayBuffersEqual(payload1, payload2);
}
// Performs an SDP offer/answer exchange in which pc2 (the callee side of the
// tests) creates the offer instead of pc1. Note the deliberate ordering:
// each description is applied to the remote peer before the local one.
async function doInverseSignalingHandshake(pc1, pc2) {
// pc2 makes the offer, requesting to receive both audio and video.
const offer = await pc2.createOffer({offerToReceiveAudio: true, offerToReceiveVideo: true});
await pc1.setRemoteDescription(offer);
await pc2.setLocalDescription(offer);
// pc1 answers.
const answer = await pc1.createAnswer();
await pc2.setRemoteDescription(answer);
await pc1.setLocalDescription(answer);
}
// Exercises an end-to-end encoded-audio insertable-streams flow: the sender
// side forwards 15 frames (5 untouched, 5 with fully replaced payloads, 5
// modified in place), recording each expected payload in `frameInfos`; the
// receiver side reads frames and verifies they arrive matching, in order.
// `negotiationFunction` performs the offer/answer exchange (normal or
// inverse), so both negotiation directions can be tested.
// Resolves when all sent frames have been verified on the receiver.
async function testAudioFlow(t, negotiationFunction) {
  const caller = new RTCPeerConnection({forceEncodedAudioInsertableStreams:true});
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection({forceEncodedAudioInsertableStreams:true});
  t.add_cleanup(() => callee.close());
  const stream = await navigator.mediaDevices.getUserMedia({audio:true});
  const audioTrack = stream.getAudioTracks()[0];
  t.add_cleanup(() => audioTrack.stop());
  const audioSender = caller.addTrack(audioTrack);
  const senderStreams = audioSender.createEncodedAudioStreams();
  const senderReader = senderStreams.readableStream.getReader();
  const senderWriter = senderStreams.writableStream.getWriter();

  const frameInfos = [];
  const numFramesPassthrough = 5;
  const numFramesReplaceData = 5;
  const numFramesModifyData = 5;
  const numFramesToSend = numFramesPassthrough + numFramesReplaceData + numFramesModifyData;

  const ontrackPromise = new Promise(resolve => {
    callee.ontrack = t.step_func(() => {
      const audioReceiver = callee.getReceivers().find(r => r.track.kind === 'audio');
      assert_true(audioReceiver !== undefined);
      const receiverStreams = audioReceiver.createEncodedAudioStreams();
      const receiverReader = receiverStreams.readableStream.getReader();
      // The writer is acquired to mirror the sender-side setup, but the
      // receiver side of this test only reads.
      const receiverWriter = receiverStreams.writableStream.getWriter();
      // The WebRTC stack may occasionally deliver duplicate frames on the
      // receiver side, therefore start many more read attempts (20x) than
      // sent frames to account for this.
      const maxFramesToReceive = numFramesToSend * 20;
      let numVerifiedFrames = 0;
      for (let i = 0; i < maxFramesToReceive; i++) {
        receiverReader.read().then(t.step_func(result => {
          if (frameInfos[numVerifiedFrames] &&
              areFrameInfosEqual(result.value, frameInfos[numVerifiedFrames])) {
            numVerifiedFrames++;
          } else if (frameInfos[numVerifiedFrames-1] &&
                     areFrameInfosEqual(result.value, frameInfos[numVerifiedFrames-1])) {
            // Duplicate frame. It can happen occasionally. Ignore.
          } else {
            // Receiving unexpected (nonduplicate) frames is an indication that
            // frames are not passed correctly between sender and receiver.
            assert_unreached("Incorrect frame received");
          }
          if (numVerifiedFrames === numFramesToSend)
            resolve();
        })).catch(e => console.error(e));
      }
    });
  });
  exchangeIceCandidates(caller, callee);
  await negotiationFunction(caller, callee);

  // Pass frames as they come from the encoder.
  for (let i = 0; i < numFramesPassthrough; i++) {
    const result = await senderReader.read();
    frameInfos.push({data: result.value.data});
    senderWriter.write(result.value);
  }
  // Replace frame data with arbitrary buffers.
  for (let i = 0; i < numFramesReplaceData; i++) {
    const result = await senderReader.read();
    const buffer = new ArrayBuffer(100);
    const int8View = new Int8Array(buffer);
    int8View.fill(i);
    result.value.data = buffer;
    frameInfos.push({data: result.value.data});
    senderWriter.write(result.value);
  }
  // Modify frame data in place. Fixed: this loop previously iterated over
  // numFramesReplaceData; it must use numFramesModifyData (the values happen
  // to coincide today, but the bound was wrong).
  for (let i = 0; i < numFramesModifyData; i++) {
    const result = await senderReader.read();
    const int8View = new Int8Array(result.value.data);
    int8View.fill(i);
    frameInfos.push({data: result.value.data});
    senderWriter.write(result.value);
  }
  return ontrackPromise;
}
// Frames flow end to end when the caller initiates negotiation.
promise_test(t => testAudioFlow(t, doSignalingHandshake),
             'Frames flow correctly using insertable streams');

// Frames flow end to end when the callee initiates negotiation instead.
promise_test(t => testAudioFlow(t, doInverseSignalingHandshake),
             'Frames flow correctly using insertable streams when receiver starts negotiation');
// A sender's createEncodedAudioStreams() must throw InvalidStateError when
// the connection was not created with forceEncodedAudioInsertableStreams.
promise_test(async t => {
  const pc1 = new RTCPeerConnection();
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection();
  t.add_cleanup(() => pc2.close());

  const mediaStream = await navigator.mediaDevices.getUserMedia({audio:true});
  const [track] = mediaStream.getAudioTracks();
  t.add_cleanup(() => track.stop());

  exchangeIceCandidates(pc1, pc2);
  await doSignalingHandshake(pc1, pc2);

  const sender = pc1.addTrack(track);
  assert_throws_dom("InvalidStateError", () => sender.createEncodedAudioStreams());
}, 'RTCRtpSender.createEncodedAudioStream() throws if not requested in PC configuration');
// A receiver's createEncodedAudioStreams() must also throw InvalidStateError
// when the connection was not created with forceEncodedAudioInsertableStreams.
promise_test(async t => {
  const pc1 = new RTCPeerConnection();
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection();
  t.add_cleanup(() => pc2.close());

  const mediaStream = await navigator.mediaDevices.getUserMedia({audio:true});
  const [track] = mediaStream.getAudioTracks();
  t.add_cleanup(() => track.stop());
  pc1.addTrack(track);

  const trackArrived = new Promise(resolve => {
    pc2.ontrack = t.step_func(() => {
      const receiver = pc2.getReceivers().find(r => r.track.kind === 'audio');
      assert_true(receiver !== undefined);
      assert_throws_dom("InvalidStateError", () => receiver.createEncodedAudioStreams());
      resolve();
    });
  });

  exchangeIceCandidates(pc1, pc2);
  await doSignalingHandshake(pc1, pc2);
  return trackArrived;
}, 'RTCRtpReceiver.createEncodedAudioStream() throws if not requested in PC configuration');
// Transfers the sender's readable encoded-audio stream to a dedicated Worker.
// The Worker first posts a plain object describing the frame it read (the
// expected values), then posts the RTCEncodedAudioFrame itself twice; the
// test verifies both frames against the expected values before resolving.
promise_test(async t => {
  const caller = new RTCPeerConnection({forceEncodedAudioInsertableStreams:true});
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());
  const stream = await navigator.mediaDevices.getUserMedia({audio:true});
  const track = stream.getTracks()[0];
  t.add_cleanup(() => track.stop());
  const sender = caller.addTrack(track);
  const senderStreams = sender.createEncodedAudioStreams();
  const senderWorker = new Worker('RTCPeerConnection-sender-worker-single-frame.js');
  // Transfer (not copy) the readable stream to the worker.
  senderWorker.postMessage(
      {readableStream: senderStreams.readableStream},
      [senderStreams.readableStream]);
  let expectedFrameData = null;
  let numVerifiedFrames = 0;
  const onmessagePromise = new Promise(resolve => {
    senderWorker.onmessage = t.step_func(message => {
      if (!(message.data instanceof RTCEncodedAudioFrame)) {
        // This is the first message sent from the Worker to the test.
        // It contains an object (not an RTCEncodedAudioFrame) with the same
        // fields as the RTCEncodedAudioFrame to be sent in follow-up messages.
        // These serve as expected values to validate that the
        // RTCEncodedAudioFrame is sent correctly back to the test in the next
        // message.
        expectedFrameData = message.data;
      } else {
        // This is the frame sent by the Worker after reading it from the
        // readable stream. The Worker sends it twice after sending the
        // verification message.
        assert_equals(message.data.type, expectedFrameData.type);
        assert_equals(message.data.timestamp, expectedFrameData.timestamp);
        assert_true(areArrayBuffersEqual(message.data.data, expectedFrameData.data));
        if (++numVerifiedFrames === 2)
          resolve();
      }
    });
  });
  exchangeIceCandidates(caller, callee);
  await doSignalingHandshake(caller, callee);
  return onmessagePromise;
}, 'RTCRtpSender readable stream transferred to a Worker and the Worker sends an RTCEncodedAudioFrame back');
</script>
</body>
</html>
......@@ -128,6 +128,8 @@ async function testVideoFlow(t, negotiationFunction) {
frameInfos.push({timestamp: result.value.timestamp, data: result.value.data});
senderWriter.write(result.value);
}
return ontrackPromise;
}
promise_test(async t => {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment