Commit 965d3c64 authored by Guido Urdaneta, committed by Commit Bot

[RTCInsertableStreams] Add validation for additionalData field in tests

This CL adds verification that the deprecated additionalData field
in video frames can be constructed from the frame's metadata.

Bug: 1069295
Change-Id: I9b6c06a9e6126d4461cb1c15ce8981312def07c6
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2378310
Reviewed-by: Harald Alvestrand <hta@chromium.org>
Commit-Queue: Guido Urdaneta <guidou@chromium.org>
Cr-Commit-Position: refs/heads/master@{#802656}
parent 40203c99
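
In outline, the checks added by this CL run wherever a test reads an encoded video frame: they recompute the descriptor bytes from the frame's getMetadata() output and compare them with the frame's nonstandard additionalData field. A condensed sketch of that pattern, using only names from the diff below (not extra code in the CL):

  const result = await senderReader.read();
  verifyNonstandardAdditionalDataIfPresent(result.value);
  // ...which, when additionalData is exposed, reduces to:
  //   assert_true(areArrayBuffersEqual(result.value.additionalData,
  //                                    computeAdditionalData(result.value)));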
@@ -9,9 +9,6 @@
</head>
<body>
<script>
// TODO(crbug.com/1058021): Move this test to external/wpt/webrtc/ once the
// Insertable Streams spec is mature enough.
async function testVideoFlow(t, negotiationFunction) {
const caller = new RTCPeerConnection({encodedInsertableStreams:true});
t.add_cleanup(() => caller.close());
@@ -47,6 +44,7 @@ async function testVideoFlow(t, negotiationFunction) {
let numVerifiedFrames = 0;
for (let i = 0; i < maxFramesToReceive; i++) {
receiverReader.read().then(t.step_func(result => {
verifyNonstandardAdditionalDataIfPresent(result.value);
if (frameInfos[numVerifiedFrames] &&
areFrameInfosEqual(result.value, frameInfos[numVerifiedFrames])) {
numVerifiedFrames++;
@@ -71,6 +69,7 @@ async function testVideoFlow(t, negotiationFunction) {
const result = await senderReader.read();
const metadata = result.value.getMetadata();
assert_true(containsVideoMetadata(metadata));
verifyNonstandardAdditionalDataIfPresent(result.value);
frameInfos.push({
timestamp: result.value.timestamp,
type: result.value.type,
@@ -319,7 +318,8 @@ promise_test(async t => {
let numSentDeltaFrames = 0;
// Pass frames as they come from the encoder.
for (let i = 0; i < numFramesToSend; i++) {
const result = await senderReader.read();
verifyNonstandardAdditionalDataIfPresent(result.value);
if (result.value.type == 'key') {
numSentKeyFrames++;
} else {
@@ -388,6 +388,7 @@ promise_test(async t => {
const receiverReader = receiverStreams.readable.getReader();
receiverReader.read().then(t.step_func(receivedResult => {
assert_true(areFrameInfosEqual(receivedResult.value, sentFrameInfo));
verifyNonstandardAdditionalDataIfPresent(receivedResult.value);
resolve();
}));
});
@@ -397,7 +398,8 @@ promise_test(async t => {
await exchangeOfferAnswer(caller, callee);
// Pass frames as they come from the encoder.
const result = await senderReader.read();
verifyNonstandardAdditionalDataIfPresent(result.value);
sentFrameInfo = {
timestamp: result.value.timestamp,
type: result.value.type,
@@ -109,3 +109,133 @@ async function exchangeOfferAnswerReverse(pc1, pc2) {
  await pc2.setRemoteDescription(answer);
  await pc1.setLocalDescription(answer);
}

// Builds a frame descriptor object from the frame's metadata;
// computeAdditionalData() below serializes it.
function createFrameDescriptor(videoFrame) {
  const kMaxSpatialLayers = 8;
  const kMaxTemporalLayers = 8;
  const kMaxNumFrameDependencies = 8;
  const metadata = videoFrame.getMetadata();
  let frameDescriptor = {
    beginningOfSubFrame: true,
    endOfSubFrame: false,
    frameId: metadata.frameId & 0xFFFF,
    spatialLayers: 1 << metadata.spatialIndex,
    // getMetadata() reports the temporal layer as temporalIndex (see the
    // range check in computeAdditionalData()).
    temporalLayer: metadata.temporalIndex,
    frameDependenciesDiffs: [],
    width: 0,
    height: 0
  };
  for (const dependency of metadata.dependencies) {
    frameDescriptor.frameDependenciesDiffs.push(metadata.frameId - dependency);
  }
  if (metadata.dependencies.length == 0) {
    frameDescriptor.width = metadata.width;
    frameDescriptor.height = metadata.height;
  }
  return frameDescriptor;
}
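// Example with hypothetical values: a delta frame whose metadata reports
// frameId 70004, one dependency on frame 70001, spatialIndex 0 and
// temporalIndex 1 maps to
//   { beginningOfSubFrame: true, endOfSubFrame: false, frameId: 4468,
//     spatialLayers: 1, temporalLayer: 1, frameDependenciesDiffs: [3],
//     width: 0, height: 0 }
// (4468 is 70004 & 0xFFFF; width/height stay 0 because the frame has a
// dependency).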

function additionalDataSize(descriptor) {
  if (!descriptor.beginningOfSubFrame) {
    return 1;
  }
  let size = 4;
  for (const fdiff of descriptor.frameDependenciesDiffs) {
    size += (fdiff >= (1 << 6)) ? 2 : 1;
  }
  if (descriptor.beginningOfSubFrame &&
      descriptor.frameDependenciesDiffs.length == 0 &&
      descriptor.width > 0 &&
      descriptor.height > 0) {
    size += 4;
  }
  return size;
}
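// For example (hypothetical descriptors): a key frame with no dependency
// diffs and a nonzero resolution needs 4 + 4 = 8 bytes, while a delta frame
// with dependency diffs [3, 70] needs 4 + 1 + 2 = 7 bytes, because diffs of
// 64 or more take the two-byte extended encoding.
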
// Compute the buffer reported in the additionalData field using the metadata
// provided by a video frame.
// Based on the webrtc::RtpDescriptorAuthentication() C++ function at
// https://source.chromium.org/chromium/chromium/src/+/master:third_party/webrtc/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc
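// Layout written below: byte 0 carries the subframe flags, the
// has-dependencies flag and the temporal layer; byte 1 is the spatial-layers
// bitmask; bytes 2-3 hold the low 16 bits of frameId; these are followed by
// either 4 bytes of width/height (only when the frame has no dependencies)
// or one or two bytes per dependency diff.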
function computeAdditionalData(videoFrame) {
  const kMaxSpatialLayers = 8;
  const kMaxTemporalLayers = 8;
  const kMaxNumFrameDependencies = 8;
  const metadata = videoFrame.getMetadata();
  if (metadata.spatialIndex < 0 ||
      metadata.temporalIndex < 0 ||
      metadata.spatialIndex >= kMaxSpatialLayers ||
      metadata.temporalIndex >= kMaxTemporalLayers ||
      metadata.dependencies.length > kMaxNumFrameDependencies) {
    return new ArrayBuffer(0);
  }
  const descriptor = createFrameDescriptor(videoFrame);
  const size = additionalDataSize(descriptor);
  const additionalData = new ArrayBuffer(size);
  const data = new Uint8Array(additionalData);
  const kFlagBeginOfSubframe = 0x80;
  const kFlagEndOfSubframe = 0x40;
  const kFlagFirstSubframeV00 = 0x20;
  const kFlagLastSubframeV00 = 0x10;
  const kFlagDependencies = 0x08;
  const kFlagMoreDependencies = 0x01;
  const kFlagExtendedOffset = 0x02;
  let baseHeader =
      (descriptor.beginningOfSubFrame ? kFlagBeginOfSubframe : 0) |
      (descriptor.endOfSubFrame ? kFlagEndOfSubframe : 0);
  baseHeader |= kFlagFirstSubframeV00;
  baseHeader |= kFlagLastSubframeV00;
  if (!descriptor.beginningOfSubFrame) {
    data[0] = baseHeader;
    return additionalData;
  }
  data[0] =
      baseHeader |
      (descriptor.frameDependenciesDiffs.length == 0 ? 0 : kFlagDependencies) |
      descriptor.temporalLayer;
  data[1] = descriptor.spatialLayers;
  data[2] = descriptor.frameId & 0xFF;
  data[3] = descriptor.frameId >> 8;
  const fdiffs = descriptor.frameDependenciesDiffs;
  let offset = 4;
  if (descriptor.beginningOfSubFrame &&
      fdiffs.length == 0 &&
      descriptor.width > 0 &&
      descriptor.height > 0) {
    data[offset++] = (descriptor.width >> 8);
    data[offset++] = (descriptor.width & 0xFF);
    data[offset++] = (descriptor.height >> 8);
    data[offset++] = (descriptor.height & 0xFF);
  }
  for (let i = 0; i < fdiffs.length; i++) {
    const extended = fdiffs[i] >= (1 << 6);
    const more = i < fdiffs.length - 1;
    data[offset++] = ((fdiffs[i] & 0x3f) << 2) |
        (extended ? kFlagExtendedOffset : 0) |
        (more ? kFlagMoreDependencies : 0);
    if (extended) {
      data[offset++] = fdiffs[i] >> 6;
    }
  }
  return additionalData;
}

function verifyNonstandardAdditionalDataIfPresent(videoFrame) {
  if (videoFrame.additionalData === undefined)
    return;
  const computedData = computeAdditionalData(videoFrame);
  assert_true(areArrayBuffersEqual(videoFrame.additionalData, computedData));
}
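
// areArrayBuffersEqual() is defined elsewhere in this helper file and is not
// part of this diff. A minimal byte-wise comparison consistent with how it is
// used above could look like the following sketch (hypothetical name and
// implementation, not the file's actual helper):
function areArrayBuffersEqualSketch(buffer1, buffer2) {
  if (buffer1.byteLength !== buffer2.byteLength)
    return false;
  const view1 = new Uint8Array(buffer1);
  const view2 = new Uint8Array(buffer2);
  for (let i = 0; i < view1.length; i++) {
    if (view1[i] !== view2[i])
      return false;
  }
  return true;
}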