Commit e5798b2c authored by Thomas Guilbert, committed by Chromium LUCI CQ

Expose VideoEncoder to workers

This CL mirrors the changes landed in
acbd2eae, and exposes VideoEncoder to
workers.

The changes in this CL rely on the conclusions from the previous CL.
Notably, there is a need for a synchronous wait when getting the GPU
factories due to some threading issues, and there is no support for
logging from workers.
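
For reference, the worker-side pattern this CL uses to obtain the GPU
factories, condensed from the ProcessConfigure() changes in the diff below:

  media::GpuVideoAcceleratorFactories* GetGpuFactoriesOnMainThread() {
    DCHECK(IsMainThread());
    return Platform::Current()->GetGpuFactories();
  }

  // From a worker: hop to the main thread for the factories, then resume in
  // OnReceivedGpuFactories() on the current sequence with the result.
  auto on_gpu_factories_cb = CrossThreadBindOnce(
      &VideoEncoder::OnReceivedGpuFactories,
      WrapCrossThreadWeakPersistent(this), WrapCrossThreadPersistent(request));
  Thread::MainThread()->GetTaskRunner()->PostTaskAndReplyWithResult(
      FROM_HERE,
      ConvertToBaseOnceCallback(
          CrossThreadBindOnce(&GetGpuFactoriesOnMainThread)),
      ConvertToBaseOnceCallback(std::move(on_gpu_factories_cb)));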

Bug: 1094169
Change-Id: I2da6cdb2afd7b80e342268cbbfcb00fea64ad37f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2578002
Commit-Queue: Thomas Guilbert <tguilbert@chromium.org>
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Reviewed-by: Dan Sanders <sandersd@chromium.org>
Cr-Commit-Position: refs/heads/master@{#835551}
parent 620f0cb1
......@@ -45,16 +45,23 @@
#include "third_party/blink/renderer/platform/bindings/exception_state.h"
#include "third_party/blink/renderer/platform/bindings/script_state.h"
#include "third_party/blink/renderer/platform/instrumentation/use_counter.h"
#include "third_party/blink/renderer/platform/scheduler/public/post_cross_thread_task.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"
namespace blink {
namespace {
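// Runs on the main thread (see the DCHECK below). Worker contexts reach it
// via the cross-thread post in ProcessConfigure() and get the result back in
// OnReceivedGpuFactories().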
media::GpuVideoAcceleratorFactories* GetGpuFactoriesOnMainThread() {
DCHECK(IsMainThread());
return Platform::Current()->GetGpuFactories();
}
std::unique_ptr<media::VideoEncoder> CreateAcceleratedVideoEncoder(
media::VideoCodecProfile profile,
const media::VideoEncoder::Options& options) {
auto* gpu_factories = Platform::Current()->GetGpuFactories();
const media::VideoEncoder::Options& options,
media::GpuVideoAcceleratorFactories* gpu_factories) {
if (!gpu_factories || !gpu_factories->IsGpuVideoAcceleratorEnabled())
return nullptr;
......@@ -94,7 +101,7 @@ std::unique_ptr<media::VideoEncoder> CreateAcceleratedVideoEncoder(
if (!found_supported_profile)
return nullptr;
auto task_runner = Thread::MainThread()->GetTaskRunner();
auto task_runner = Thread::Current()->GetTaskRunner();
return std::make_unique<
media::AsyncDestroyVideoEncoder<media::VideoEncodeAcceleratorAdapter>>(
std::make_unique<media::VideoEncodeAcceleratorAdapter>(
......@@ -136,8 +143,14 @@ VideoEncoder::VideoEncoder(ScriptState* script_state,
UseCounter::Count(ExecutionContext::From(script_state),
WebFeature::kWebCodecs);
logger_ = std::make_unique<CodecLogger>(
GetExecutionContext(), Thread::MainThread()->GetTaskRunner());
// TODO(crbug.com/1151005): Use a real MediaLog in worker contexts too.
if (IsMainThread()) {
logger_ = std::make_unique<CodecLogger>(
GetExecutionContext(), Thread::MainThread()->GetTaskRunner());
} else {
// This will create a logger backed by a NullMediaLog, which does nothing.
logger_ = std::make_unique<CodecLogger>();
}
media::MediaLog* log = logger_->log();
......@@ -263,13 +276,19 @@ void VideoEncoder::UpdateEncoderLog(std::string encoder_name,
is_hw_accelerated);
}
void VideoEncoder::CreateAndInitializeEncoderOnEncoderSupportKnown(
void VideoEncoder::CreateAndInitializeEncoderWithoutAcceleration(
Request* request) {
CreateAndInitializeEncoderOnEncoderSupportKnown(request, nullptr);
}
void VideoEncoder::CreateAndInitializeEncoderOnEncoderSupportKnown(
Request* request,
media::GpuVideoAcceleratorFactories* gpu_factories) {
DCHECK(active_config_);
DCHECK_EQ(request->type, Request::Type::kConfigure);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
media_encoder_ = CreateMediaVideoEncoder(*active_config_);
media_encoder_ = CreateMediaVideoEncoder(*active_config_, gpu_factories);
if (!media_encoder_) {
HandleError(logger_->MakeException(
"Encoder creation error.",
......@@ -308,20 +327,21 @@ void VideoEncoder::CreateAndInitializeEncoderOnEncoderSupportKnown(
}
std::unique_ptr<media::VideoEncoder> VideoEncoder::CreateMediaVideoEncoder(
const ParsedConfig& config) {
const ParsedConfig& config,
media::GpuVideoAcceleratorFactories* gpu_factories) {
// TODO(https://crbug.com/1119636): Implement / call a proper method for
// detecting support of encoder configs.
switch (config.acc_pref) {
case AccelerationPreference::kRequire: {
auto result =
CreateAcceleratedVideoEncoder(config.profile, config.options);
auto result = CreateAcceleratedVideoEncoder(
config.profile, config.options, gpu_factories);
if (result)
UpdateEncoderLog("AcceleratedVideoEncoder", true);
return result;
}
case AccelerationPreference::kAllow:
if (auto result =
CreateAcceleratedVideoEncoder(config.profile, config.options)) {
if (auto result = CreateAcceleratedVideoEncoder(
config.profile, config.options, gpu_factories)) {
UpdateEncoderLog("AcceleratedVideoEncoder", true);
return result;
}
......@@ -576,30 +596,53 @@ void VideoEncoder::ProcessEncode(Request* request) {
request->frame->destroy();
}
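// Reply callback for the GPU factories lookup posted from ProcessConfigure().
// Falls back to an unaccelerated encoder when no usable factories are
// available; otherwise waits until encoder support is known before creating
// the accelerated encoder.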
void VideoEncoder::OnReceivedGpuFactories(
Request* request,
media::GpuVideoAcceleratorFactories* gpu_factories) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!gpu_factories || !gpu_factories->IsGpuVideoAcceleratorEnabled()) {
CreateAndInitializeEncoderWithoutAcceleration(request);
return;
}
// Delay creating the hw encoder until HW encoder support is known, so that
// GetVideoEncodeAcceleratorSupportedProfiles() can give a reliable answer.
auto on_encoder_support_known_cb = WTF::Bind(
&VideoEncoder::CreateAndInitializeEncoderOnEncoderSupportKnown,
WrapCrossThreadWeakPersistent(this), WrapCrossThreadPersistent(request),
CrossThreadUnretained(gpu_factories));
gpu_factories->NotifyEncoderSupportKnown(
std::move(on_encoder_support_known_cb));
}
void VideoEncoder::ProcessConfigure(Request* request) {
DCHECK_NE(state_.AsEnum(), V8CodecState::Enum::kClosed);
DCHECK_EQ(request->type, Request::Type::kConfigure);
DCHECK(active_config_);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
auto* gpu_factories = Platform::Current()->GetGpuFactories();
stall_request_processing_ = true;
bool deny_hardware_encoder =
active_config_->acc_pref == AccelerationPreference::kDeny;
if (!deny_hardware_encoder && gpu_factories &&
gpu_factories->IsGpuVideoAcceleratorEnabled()) {
// Delay create the hw encoder until HW encoder support is known, so that
// GetVideoEncodeAcceleratorSupportedProfiles() can give a reliable answer.
auto on_encoder_support_known_cb = WTF::Bind(
&VideoEncoder::CreateAndInitializeEncoderOnEncoderSupportKnown,
WrapCrossThreadWeakPersistent(this),
WrapCrossThreadPersistent(request));
gpu_factories->NotifyEncoderSupportKnown(
std::move(on_encoder_support_known_cb));
} else {
CreateAndInitializeEncoderOnEncoderSupportKnown(request);
if (active_config_->acc_pref == AccelerationPreference::kDeny) {
CreateAndInitializeEncoderWithoutAcceleration(request);
return;
}
if (IsMainThread()) {
OnReceivedGpuFactories(request, Platform::Current()->GetGpuFactories());
return;
}
auto on_gpu_factories_cb = CrossThreadBindOnce(
&VideoEncoder::OnReceivedGpuFactories,
WrapCrossThreadWeakPersistent(this), WrapCrossThreadPersistent(request));
Thread::MainThread()->GetTaskRunner()->PostTaskAndReplyWithResult(
FROM_HERE,
ConvertToBaseOnceCallback(
CrossThreadBindOnce(&GetGpuFactoriesOnMainThread)),
ConvertToBaseOnceCallback(std::move(on_gpu_factories_cb)));
}
void VideoEncoder::ProcessReconfigure(Request* request) {
......
......@@ -25,6 +25,7 @@
#include "third_party/blink/renderer/platform/context_lifecycle_observer.h"
namespace media {
class GpuVideoAcceleratorFactories;
class VideoEncoder;
struct VideoEncoderOutput;
} // namespace media
......@@ -133,11 +134,17 @@ class MODULES_EXPORT VideoEncoder final
void ResetInternal();
ScriptPromiseResolver* MakePromise();
void OnReceivedGpuFactories(Request*, media::GpuVideoAcceleratorFactories*);
ParsedConfig* ParseConfig(const VideoEncoderConfig*, ExceptionState&);
bool VerifyCodecSupport(ParsedConfig*, ExceptionState&);
void CreateAndInitializeEncoderOnEncoderSupportKnown(Request* request);
void CreateAndInitializeEncoderWithoutAcceleration(Request* request);
void CreateAndInitializeEncoderOnEncoderSupportKnown(
Request* request,
media::GpuVideoAcceleratorFactories* gpu_factories);
std::unique_ptr<media::VideoEncoder> CreateMediaVideoEncoder(
const ParsedConfig& config);
const ParsedConfig& config,
media::GpuVideoAcceleratorFactories* gpu_factories);
bool CanReconfigure(ParsedConfig& original_config, ParsedConfig& new_config);
std::unique_ptr<CodecLogger> logger_;
......
......@@ -5,7 +5,7 @@
// https://github.com/WICG/web-codecs
[
Exposed=Window,
Exposed=(Window,DedicatedWorker),
RuntimeEnabled=WebCodecs,
ActiveScriptWrappable
] interface VideoEncoder {
......
......@@ -747,8 +747,10 @@ crbug.com/591099 external/wpt/html/links/manifest/wrong-mime-type-text-plain-man
crbug.com/591099 virtual/stable/compositing/filters/sw-nested-shadow-overlaps-hw-nested-shadow.html [ Failure ]
### webcodecs/
crbug.com/591099 webcodecs/basic_video_encoding.html [ Failure ]
crbug.com/591099 webcodecs/reconfiguring_encooder.html [ Failure ]
crbug.com/591099 wpt_internal/webcodecs/basic_video_encoding.any.html [ Failure ]
crbug.com/591099 wpt_internal/webcodecs/basic_video_encoding.any.worker.html [ Failure ]
crbug.com/591099 wpt_internal/webcodecs/reconfiguring_encoder.any.html [ Failure ]
crbug.com/591099 wpt_internal/webcodecs/reconfiguring_encoder.any.worker.html [ Failure ]
# broken by https://chromium-review.googlesource.com/c/chromium/src/+/2392444
crbug.com/958381 external/wpt/css/css-flexbox/table-as-item-wide-content.html [ Failure ]
......
......@@ -356,8 +356,10 @@ crbug.com/1139790 virtual/focusless-spat-nav/fast/spatial-navigation/focusless/s
virtual/webrtc-wpt-plan-b/external/wpt/webrtc/protocol/video-codecs.https.html [ FAILURE ]
virtual/webrtc-wpt-plan-b/external/wpt/webrtc/simulcast/h264.https.html [ FAILURE ]
virtual/focusless-spat-nav/fast/spatial-navigation/focusless/snav-focusless-enter-from-interest.html [ FAILURE ]
webcodecs/basic_video_encoding.html [ FAILURE ]
webcodecs/reconfiguring_encooder.html [ Failure ]
wpt_internal/webcodecs/basic_video_encoding.any.html [ FAILURE ]
wpt_internal/webcodecs/basic_video_encoding.any.worker.html [ FAILURE ]
wpt_internal/webcodecs/reconfiguring_encoder.any.html [ FAILURE ]
wpt_internal/webcodecs/reconfiguring_encoder.any.worker.html [ FAILURE ]
external/wpt/webrtc/RTCRtpTransceiver-setCodecPreferences.html [ FAILURE ]
external/wpt/webrtc/protocol/video-codecs.https.html [ FAILURE ]
external/wpt/webrtc/simulcast/h264.https.html [ FAILURE ]
......
// META: global=window,dedicatedworker
// META: script=/common/media.js
// META: script=/webcodecs/utils.js
const defaultConfig = {
codec: 'vp8',
framerate: 25,
width: 640,
height: 480
};
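// generateBitmap() fetches pattern.png and scales it via createImageBitmap()
// instead of reading a DOM <img>, so this test can also run in a dedicated
// worker.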
async function generateBitmap(width, height) {
const src = "pattern.png";
var size = {
resizeWidth: width,
resizeHeight: height
};
return fetch(src)
.then(response => response.blob())
.then(blob => createImageBitmap(blob, size));
}
async function createVideoFrame(width, height, timestamp) {
let bitmap = await generateBitmap(width, height);
return new VideoFrame(bitmap, { timestamp: timestamp });
}
promise_test(t => {
// VideoEncoderInit lacks required fields.
assert_throws_js(TypeError, () => { new VideoEncoder({}); });
// VideoEncoderInit has required fields.
let encoder = new VideoEncoder(getDefaultCodecInit(t));
assert_equals(encoder.state, "unconfigured");
encoder.close();
return endAfterEventLoopTurn();
}, 'Test VideoEncoder construction');
promise_test(t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let badCodecsList = [
'', // Empty codec
'bogus', // Non-existent codec
'vorbis', // Audio codec
'vp9', // Ambiguous codec
'video/webm; codecs="vp9"' // Codec with mime type
]
testConfigurations(encoder, defaultConfig, badCodecsList);
return endAfterEventLoopTurn();
}, 'Test VideoEncoder.configure()');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
// No encodes yet.
assert_equals(encoder.encodeQueueSize, 0);
encoder.configure(defaultConfig);
// Still no encodes.
assert_equals(encoder.encodeQueueSize, 0);
let frame1 = await createVideoFrame(640, 480, 0);
let frame2 = await createVideoFrame(640, 480, 33333);
encoder.encode(frame1.clone());
encoder.encode(frame2.clone());
// Could be 0, 1, or 2. We can't guarantee this check runs before the UA has
// processed the encodes.
assert_true(encoder.encodeQueueSize >= 0 && encoder.encodeQueueSize <= 2)
await encoder.flush();
// We can guarantee that all encodes are processed after a flush.
assert_equals(encoder.encodeQueueSize, 0);
assert_equals(output_chunks.length, 2);
assert_equals(output_chunks[0].timestamp, frame1.timestamp);
assert_equals(output_chunks[1].timestamp, frame2.timestamp);
}, 'Test successful configure(), encode(), and flush()');
promise_test(async t => {
let callbacks_before_reset = 0;
let callbacks_after_reset = 0;
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => {
if (chunk.timestamp % 2 == 0) {
// pre-reset frames have even timestamp
callbacks_before_reset++;
} else {
// after-reset frames have odd timestamp
callbacks_after_reset++;
}
}
let encoder = new VideoEncoder(codecInit);
encoder.configure(defaultConfig);
await encoder.flush();
let frames = [];
for (let i = 0; i < 200; i++) {
let frame = await createVideoFrame(640, 480, i * 40_000);
frames.push(frame);
}
for (frame of frames)
encoder.encode(frame);
// Wait for the first frame to be encoded
await t.step_wait(() => callbacks_before_reset > 0,
"Encoded outputs started coming", 10000, 1);
let saved_callbacks_before_reset = callbacks_before_reset;
assert_greater_than(callbacks_before_reset, 0);
assert_less_than_equal(callbacks_before_reset, frames.length);
encoder.reset();
assert_equals(encoder.encodeQueueSize, 0);
let newConfig = { ...defaultConfig };
newConfig.width = 800;
newConfig.height = 600;
encoder.configure(newConfig);
for (let i = frames.length; i < frames.length + 5; i++) {
let frame = await createVideoFrame(800, 600, i * 40_000 + 1);
encoder.encode(frame);
}
await encoder.flush();
assert_equals(callbacks_after_reset, 5);
assert_equals(saved_callbacks_before_reset, callbacks_before_reset);
assert_equals(encoder.encodeQueueSize, 0);
}, 'Test successful reset() and re-configure()');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
// No encodes yet.
assert_equals(encoder.encodeQueueSize, 0);
let config = defaultConfig;
encoder.configure(config);
let frame1 = await createVideoFrame(640, 480, 0);
let frame2 = await createVideoFrame(640, 480, 33333);
encoder.encode(frame1.clone());
encoder.configure(config);
encoder.encode(frame2.clone());
await encoder.flush();
// We can guarantee that all encodes are processed after a flush.
assert_equals(encoder.encodeQueueSize, 0);
// The first frame may have been dropped when reconfiguring.
// This shouldn't happen, and should be fixed/called out in the spec, but
// this is preemptively added to prevent flakiness.
// TODO: Remove these checks when implementations handle this correctly.
assert_true(output_chunks.length == 1 || output_chunks.length == 2);
if (output_chunks.length == 1) {
// If we only have one chunk, make sure we dropped the frame that was
// in flight when we reconfigured.
assert_equals(output_chunks[0].timestamp, frame2.timestamp);
} else {
assert_equals(output_chunks[0].timestamp, frame1.timestamp);
assert_equals(output_chunks[1].timestamp, frame2.timestamp);
}
output_chunks = [];
let frame3 = await createVideoFrame(640, 480, 66666);
let frame4 = await createVideoFrame(640, 480, 100000);
encoder.encode(frame3.clone());
// Verify that a failed call to configure does not change the encoder's state.
let badConfig = { ...defaultConfig };
badConfig.codec = 'bogus';
assert_throws_js(TypeError, () => encoder.configure(badConfig));
encoder.encode(frame4.clone());
await encoder.flush();
assert_equals(output_chunks[0].timestamp, frame3.timestamp);
assert_equals(output_chunks[1].timestamp, frame4.timestamp);
}, 'Test successful encode() after re-configure().');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
let timestamp = 33333;
let frame = await createVideoFrame(640, 480, timestamp);
encoder.configure(defaultConfig);
assert_equals(encoder.state, "configured");
encoder.encode(frame);
// |frame| is no longer valid since it has been destroyed.
assert_not_equals(frame.timestamp, timestamp);
assert_throws_dom("InvalidStateError", () => frame.clone());
encoder.close();
return endAfterEventLoopTurn();
}, 'Test encoder consumes (destroys) frames.');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
return testClosedCodec(t, encoder, defaultConfig, frame);
}, 'Verify closed VideoEncoder operations');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
return testUnconfiguredCodec(t, encoder, frame);
}, 'Verify unconfigured VideoEncoder operations');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
frame.destroy();
encoder.configure(defaultConfig);
frame.destroy();
assert_throws_dom("OperationError", () => {
encoder.encode(frame)
});
}, 'Verify encoding destroyed frames throws.');
<!DOCTYPE html>
<html>
<head>
<title>Test the VideoTrackReader API.</title>
</head>
<body>
<img id='frame_image' src="pattern.png">
</body>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/media.js"></script>
<script src="/webcodecs/utils.js"></script>
<script>
const defaultConfig = {
codec: 'vp8',
framerate: 25,
width: 640,
height: 480
};
async function waitTillLoaded(img) {
if (img.complete)
return Promise.resolve();
return new Promise((resolve, reject) => {
img.onload = () => resolve()
});
}
async function generateBitmap(width, height) {
let img = document.getElementById('frame_image');
await waitTillLoaded(img);
return createImageBitmap(img,
{
resizeWidth: width,
resizeHeight: height
});
}
async function createVideoFrame(width, height, timestamp) {
let bitmap = await generateBitmap(width, height);
return new VideoFrame(bitmap, { timestamp: timestamp });
}
promise_test(t => {
// VideoEncoderInit lacks required fields.
assert_throws_js(TypeError, () => { new VideoEncoder({}); });
// VideoEncoderInit has required fields.
let encoder = new VideoEncoder(getDefaultCodecInit(t));
assert_equals(encoder.state, "unconfigured");
encoder.close();
return endAfterEventLoopTurn();
}, 'Test VideoEncoder construction');
promise_test(t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let badCodecsList = [
'', // Empty codec
'bogus', // Non-existent codec
'vorbis', // Audio codec
'vp9', // Ambiguous codec
'video/webm; codecs="vp9"' // Codec with mime type
]
testConfigurations(encoder, defaultConfig, badCodecsList);
return endAfterEventLoopTurn();
}, 'Test VideoEncoder.configure()');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
// No encodes yet.
assert_equals(encoder.encodeQueueSize, 0);
encoder.configure(defaultConfig);
// Still no encodes.
assert_equals(encoder.encodeQueueSize, 0);
let frame1 = await createVideoFrame(640, 480, 0);
let frame2 = await createVideoFrame(640, 480, 33333);
encoder.encode(frame1.clone());
encoder.encode(frame2.clone());
// Could be 0, 1, or 2. We can't guarantee this check runs before the UA has
// processed the encodes.
assert_true(encoder.encodeQueueSize >= 0 && encoder.encodeQueueSize <= 2)
await encoder.flush();
// We can guarantee that all encodes are processed after a flush.
assert_equals(encoder.encodeQueueSize, 0);
assert_equals(output_chunks.length, 2);
assert_equals(output_chunks[0].timestamp, frame1.timestamp);
assert_equals(output_chunks[1].timestamp, frame2.timestamp);
}, 'Test successful configure(), encode(), and flush()');
promise_test(async t => {
let callbacks_before_reset = 0;
let callbacks_after_reset = 0;
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => {
if (chunk.timestamp % 2 == 0) {
// pre-reset frames have even timestamp
callbacks_before_reset++;
} else {
// after-reset frames have odd timestamp
callbacks_after_reset++;
}
}
let encoder = new VideoEncoder(codecInit);
encoder.configure(defaultConfig);
await encoder.flush();
let frames = [];
for (let i = 0; i < 200; i++) {
let frame = await createVideoFrame(640, 480, i * 40_000);
frames.push(frame);
}
for (frame of frames)
encoder.encode(frame);
// Wait for the first frame to be encoded
await t.step_wait(() => callbacks_before_reset > 0,
"Encoded outputs started coming", 10000, 1);
let saved_callbacks_before_reset = callbacks_before_reset;
assert_greater_than(callbacks_before_reset, 0);
assert_less_than_equal(callbacks_before_reset, frames.length);
encoder.reset();
assert_equals(encoder.encodeQueueSize, 0);
let newConfig = { ...defaultConfig };
newConfig.width = 800;
newConfig.height = 600;
encoder.configure(newConfig);
for (let i = frames.length; i < frames.length + 5; i++) {
let frame = await createVideoFrame(800, 600, i * 40_000 + 1);
encoder.encode(frame);
}
await encoder.flush();
assert_equals(callbacks_after_reset, 5);
assert_equals(saved_callbacks_before_reset, callbacks_before_reset);
assert_equals(encoder.encodeQueueSize, 0);
}, 'Test successful reset() and re-configure()');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
// No encodes yet.
assert_equals(encoder.encodeQueueSize, 0);
let config = defaultConfig;
encoder.configure(config);
let frame1 = await createVideoFrame(640, 480, 0);
let frame2 = await createVideoFrame(640, 480, 33333);
encoder.encode(frame1.clone());
encoder.configure(config);
encoder.encode(frame2.clone());
await encoder.flush();
// We can guarantee that all encodes are processed after a flush.
assert_equals(encoder.encodeQueueSize, 0);
// The first frame may have been dropped when reconfiguring.
// This shouldn't happen, and should be fixed/called out in the spec, but
// this is preemptively added to prevent flakiness.
// TODO: Remove these checks when implementations handle this correctly.
assert_true(output_chunks.length == 1 || output_chunks.length == 2);
if (output_chunks.length == 1) {
// If we only have one chunk, make sure we dropped the frame that was
// in flight when we reconfigured.
assert_equals(output_chunks[0].timestamp, frame2.timestamp);
} else {
assert_equals(output_chunks[0].timestamp, frame1.timestamp);
assert_equals(output_chunks[1].timestamp, frame2.timestamp);
}
output_chunks = [];
let frame3 = await createVideoFrame(640, 480, 66666);
let frame4 = await createVideoFrame(640, 480, 100000);
encoder.encode(frame3.clone());
// Verify that a failed call to configure does not change the encoder's state.
let badConfig = { ...defaultConfig };
badConfig.codec = 'bogus';
assert_throws_js(TypeError, () => encoder.configure(badConfig));
encoder.encode(frame4.clone());
await encoder.flush();
assert_equals(output_chunks[0].timestamp, frame3.timestamp);
assert_equals(output_chunks[1].timestamp, frame4.timestamp);
}, 'Test successful encode() after re-configure().');
promise_test(async t => {
let output_chunks = [];
let codecInit = getDefaultCodecInit(t);
codecInit.output = chunk => output_chunks.push(chunk);
let encoder = new VideoEncoder(codecInit);
let timestamp = 33333;
let frame = await createVideoFrame(640, 480, timestamp);
encoder.configure(defaultConfig);
assert_equals(encoder.state, "configured");
encoder.encode(frame);
// |frame| is no longer valid since it has been destroyed.
assert_not_equals(frame.timestamp, timestamp);
assert_throws_dom("InvalidStateError", () => frame.clone());
encoder.close();
return endAfterEventLoopTurn();
}, 'Test encoder consumes (destroys) frames.');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
return testClosedCodec(t, encoder, defaultConfig, frame);
}, 'Verify closed VideoEncoder operations');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
return testUnconfiguredCodec(t, encoder, frame);
}, 'Verify unconfigured VideoEncoder operations');
promise_test(async t => {
let encoder = new VideoEncoder(getDefaultCodecInit(t));
let frame = await createVideoFrame(640, 480, 0);
frame.destroy();
encoder.configure(defaultConfig);
frame.destroy();
assert_throws_dom("OperationError", () => {
encoder.encode(frame)
});
}, 'Verify encoding destroyed frames throws.');
</script>
</html>
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
</head>
<body>
<img id='frame_image' style='display: none;' src="pattern.png">
<script>
'use strict';
async function waitTillLoaded(img) {
if (img.complete)
return Promise.resolve();
return new Promise((resolve, reject) => {
img.onload = () => resolve()
});
}
async function generateBitmap(width, height, text) {
let img = document.getElementById('frame_image');
await waitTillLoaded(img);
let cnv = document.createElement("canvas");
cnv.height = height;
cnv.width = width;
var ctx = cnv.getContext('2d');
ctx.drawImage(img, 0, 0, width, height);
ctx.font = '30px fantasy';
ctx.fillText(text, 5, 40);
return createImageBitmap(cnv);
}
async function createFrame(width, height, ts) {
let imageBitmap = await generateBitmap(width, height, ts.toString());
return new VideoFrame(imageBitmap, { timestamp: ts });
}
function delay(time_ms) {
return new Promise((resolve, reject) => {
setTimeout(resolve, time_ms);
});
};
async function encode_decode_test(codec, acc) {
const w = 640;
const h = 360;
let next_ts = 0
let frames_to_encode = 16;
let frames_encoded = 0;
let frames_decoded = 0;
let errors = 0;
let decoder = new VideoDecoder({
output(frame) {
assert_equals(frame.cropWidth, w, "cropWidth");
assert_equals(frame.cropHeight, h, "cropHeight");
assert_equals(frame.timestamp, next_ts++, "timestamp");
frames_decoded++;
frame.destroy();
},
error(e) {
errors++;
console.log(e.message);
}
});
const encoder_init = {
output(chunk, config) {
var data = new Uint8Array(chunk.data);
if (decoder.state != "configured" || config.description) {
decoder.configure(config);
}
decoder.decode(chunk);
frames_encoded++;
},
error(e) {
errors++;
console.log(e.message);
}
};
const encoder_config = {
codec: codec,
acceleration: acc,
width: w,
height: h,
bitrate: 5000000,
};
let encoder = new VideoEncoder(encoder_init);
encoder.configure(encoder_config);
for (let i = 0; i < frames_to_encode; i++) {
let frame = await createFrame(w, h, i);
let keyframe = (i % 5 == 0);
encoder.encode(frame, { keyFrame: keyframe });
await delay(1);
}
await encoder.flush();
await decoder.flush();
encoder.close();
decoder.close();
assert_equals(frames_encoded, frames_to_encode);
assert_equals(frames_decoded, frames_to_encode);
assert_equals(errors, 0);
}
async function encode_test(codec, acc) {
let w = 640;
let h = 360;
let next_ts = 0
let frames_to_encode = 25;
let frames_processed = 0;
let errors = 0;
let process_video_chunk = function (chunk, config) {
assert_greater_than_equal(chunk.timestamp, next_ts++);
let data = new Uint8Array(chunk.data);
let type = (chunk.timestamp % 5 == 0) ? "key" : "delta";
assert_equals(chunk.type, type);
assert_greater_than_equal(data.length, 0);
if (config) {
assert_equals(config.codec, codec);
assert_equals(config.codedWidth, w);
assert_equals(config.codedHeight, h);
let data = new Uint8Array(config.description);
}
frames_processed++;
};
const init = {
output: process_video_chunk,
error: (e) => {
errors++;
console.log(e.message);
},
};
const params = {
codec: codec,
acceleration: acc,
width: w,
height: h,
bitrate: 5000000,
framerate: 24,
};
let encoder = new VideoEncoder(init);
encoder.configure(params);
for (let i = 0; i < frames_to_encode; i++) {
let size_mismatch = (i % 16);
let frame = await createFrame(w + size_mismatch, h + size_mismatch, i);
let keyframe = (i % 5 == 0);
encoder.encode(frame, { keyFrame: keyframe });
await delay(1);
}
await encoder.flush();
encoder.close();
assert_equals(frames_processed, frames_to_encode);
assert_equals(errors, 0);
}
promise_test(encode_test.bind(null, "vp09.00.10.08", "allow"),
"encoding vp9 profile0");
promise_test(encode_test.bind(null, "vp09.02.10.10", "allow"),
"encoding vp9 profile2");
promise_test(encode_decode_test.bind(null, "vp09.02.10.10", "allow"),
"encoding and decoding vp9 profile2");
promise_test(encode_test.bind(null, "vp8", "allow"),
"encoding vp8");
promise_test(encode_decode_test.bind(null, "vp8", "allow"),
"encoding and decoding vp8");
promise_test(encode_decode_test.bind(null, "avc1.42001E", "allow"),
"encoding and decoding avc1.42001E");
/* Uncomment this for manual testing, before we have GPU tests for that */
//promise_test(encode_test.bind(null, "avc1.42001E", "require"),
// "encoding avc1.42001E");
//promise_test(encode_decode_test.bind(null, "avc1.42001E", "require"),
// "encoding and decoding avc1.42001E req");
</script>
</body>
</html>
<!DOCTYPE html>
<html>
<head>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
</head>
<body>
<img id='frame_image' style='display: none;' src="pattern.png">
<script>
'use strict';
async function waitTillLoaded(img) {
if (img.complete)
return Promise.resolve();
return new Promise((resolve, reject) => {
img.onload = () => resolve()
});
}
async function generateBitmap(width, height, text) {
let img = document.getElementById('frame_image');
await waitTillLoaded(img);
let cnv = document.createElement("canvas");
cnv.height = height;
cnv.width = width;
var ctx = cnv.getContext('2d');
ctx.drawImage(img, 0, 0, width, height);
ctx.font = '30px fantasy';
ctx.fillText(text, 5, 40);
return createImageBitmap(cnv);
}
async function createFrame(width, height, ts) {
let imageBitmap = await generateBitmap(width, height, ts.toString());
let frame = new VideoFrame(imageBitmap, { timestamp: ts });
imageBitmap.close();
return frame;
}
function delay(time_ms) {
return new Promise((resolve, reject) => {
setTimeout(resolve, time_ms);
});
};
async function change_encoding_params_test(codec, acc) {
let original_w = 800;
let original_h = 600;
let original_bitrate = 5_000_000;
let new_w = 640;
let new_h = 480;
let new_bitrate = 3_000_000;
let next_ts = 0
let reconf_ts = 0;
let frames_to_encode = 16;
let before_reconf_frames = 0;
let after_reconf_frames = 0;
let errors = 0;
let process_video_chunk = function (chunk, config) {
var data = new Uint8Array(chunk.data);
assert_greater_than_equal(data.length, 0);
let after_reconf = (reconf_ts != 0) && (chunk.timestamp >= reconf_ts);
if (after_reconf) {
after_reconf_frames++;
if (config) {
assert_equals(config.codedWidth, new_w);
assert_equals(config.codedHeight, new_h);
}
} else {
before_reconf_frames++;
if (config) {
assert_equals(config.codedWidth, original_w);
assert_equals(config.codedHeight, original_h);
}
}
};
const init = {
output: process_video_chunk,
error: (e) => {
errors++;
console.log(e.message);
},
};
const params = {
codec: codec,
acceleration: acc,
width: original_w,
height: original_h,
bitrate: original_bitrate,
framerate: 30,
};
let encoder = new VideoEncoder(init);
encoder.configure(params);
// Encode |frames_to_encode| frames with original settings
for (let i = 0; i < frames_to_encode; i++) {
var frame = await createFrame(original_w, original_h, next_ts++);
encoder.encode(frame, {});
await delay(1);
}
params.width = new_w;
params.height = new_h;
params.bitrate = new_bitrate;
// Reconfigure encoder and run |frames_to_encode| frames with new settings
encoder.configure(params);
reconf_ts = next_ts;
for (let i = 0; i < frames_to_encode; i++) {
var frame = await createFrame(new_w, new_h, next_ts++);
encoder.encode(frame, {});
await delay(1);
}
await encoder.flush();
// Configure back to original config
params.width = original_w;
params.height = original_h;
params.bitrate = original_bitrate;
encoder.configure(params);
await encoder.flush();
encoder.close();
assert_equals(before_reconf_frames, frames_to_encode);
assert_equals(after_reconf_frames, frames_to_encode);
assert_equals(errors, 0);
}
promise_test(change_encoding_params_test.bind(null, "vp8", "allow"),
"reconfiguring vp8");
promise_test(change_encoding_params_test.bind(null, "vp09.00.10.08", "allow"),
"reconfiguring vp9 profile0");
promise_test(change_encoding_params_test.bind(null, "vp09.02.10.10", "allow"),
"reconfiguring vp9 profile2");
promise_test(change_encoding_params_test.bind(null, "avc1.42001E", "allow"),
"reconfiguring avc1.42001E");
/* Uncomment this for manual testing, before we have GPU tests for that */
//promise_test(change_encoding_params_test.bind(null, "avc1.42001E", "require"),
// "reconfiguring avc1.42001E hw");
</script>
</body>
</html>
\ No newline at end of file
......@@ -1727,6 +1727,16 @@ Starting worker: resources/global-interface-listing-worker.js
[Worker] method decode
[Worker] method flush
[Worker] method reset
[Worker] interface VideoEncoder
[Worker] attribute @@toStringTag
[Worker] getter encodeQueueSize
[Worker] getter state
[Worker] method close
[Worker] method configure
[Worker] method constructor
[Worker] method encode
[Worker] method flush
[Worker] method reset
[Worker] interface VideoFrame
[Worker] attribute @@toStringTag
[Worker] getter codedHeight
......
// META: global=window,dedicatedworker
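// The frame helpers below draw onto an OffscreenCanvas (no DOM access), so
// the same test code runs in both window and dedicated worker contexts.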
async function getImageAsBitmap(width, height) {
const src = "pattern.png";
var size = {
resizeWidth: width,
resizeHeight: height
};
return fetch(src)
.then(response => response.blob())
.then(blob => createImageBitmap(blob, size));
}
async function generateBitmap(width, height, text) {
let img = await getImageAsBitmap(width, height);
let cnv = new OffscreenCanvas(width, height);
var ctx = cnv.getContext('2d');
ctx.drawImage(img, 0, 0, width, height);
img.close();
ctx.font = '30px fantasy';
ctx.fillText(text, 5, 40);
return createImageBitmap(cnv);
}
async function createFrame(width, height, ts) {
let imageBitmap = await generateBitmap(width, height, ts.toString());
return new VideoFrame(imageBitmap, { timestamp: ts });
}
function delay(time_ms) {
return new Promise((resolve, reject) => {
setTimeout(resolve, time_ms);
});
};
async function encode_decode_test(codec, acc) {
const w = 640;
const h = 360;
let next_ts = 0
let frames_to_encode = 16;
let frames_encoded = 0;
let frames_decoded = 0;
let errors = 0;
let decoder = new VideoDecoder({
output(frame) {
assert_equals(frame.cropWidth, w, "cropWidth");
assert_equals(frame.cropHeight, h, "cropHeight");
assert_equals(frame.timestamp, next_ts++, "timestamp");
frames_decoded++;
frame.destroy();
},
error(e) {
errors++;
console.log(e.message);
}
});
const encoder_init = {
output(chunk, config) {
var data = new Uint8Array(chunk.data);
if (decoder.state != "configured" || config.description) {
decoder.configure(config);
}
decoder.decode(chunk);
frames_encoded++;
},
error(e) {
errors++;
console.log(e.message);
}
};
const encoder_config = {
codec: codec,
acceleration: acc,
width: w,
height: h,
bitrate: 5000000,
};
let encoder = new VideoEncoder(encoder_init);
encoder.configure(encoder_config);
for (let i = 0; i < frames_to_encode; i++) {
let frame = await createFrame(w, h, i);
let keyframe = (i % 5 == 0);
encoder.encode(frame, { keyFrame: keyframe });
await delay(1);
}
await encoder.flush();
await decoder.flush();
encoder.close();
decoder.close();
assert_equals(frames_encoded, frames_to_encode);
assert_equals(frames_decoded, frames_to_encode);
assert_equals(errors, 0);
}
async function encode_test(codec, acc) {
let w = 640;
let h = 360;
let next_ts = 0
let frames_to_encode = 25;
let frames_processed = 0;
let errors = 0;
let process_video_chunk = function (chunk, config) {
assert_greater_than_equal(chunk.timestamp, next_ts++);
let data = new Uint8Array(chunk.data);
let type = (chunk.timestamp % 5 == 0) ? "key" : "delta";
assert_equals(chunk.type, type);
assert_greater_than_equal(data.length, 0);
if (config) {
assert_equals(config.codec, codec);
assert_equals(config.codedWidth, w);
assert_equals(config.codedHeight, h);
let data = new Uint8Array(config.description);
}
frames_processed++;
};
const init = {
output: process_video_chunk,
error: (e) => {
errors++;
console.log(e.message);
},
};
const params = {
codec: codec,
acceleration: acc,
width: w,
height: h,
bitrate: 5000000,
framerate: 24,
};
let encoder = new VideoEncoder(init);
encoder.configure(params);
for (let i = 0; i < frames_to_encode; i++) {
let size_mismatch = (i % 16);
let frame = await createFrame(w + size_mismatch, h + size_mismatch, i);
let keyframe = (i % 5 == 0);
encoder.encode(frame, { keyFrame: keyframe });
await delay(1);
}
await encoder.flush();
encoder.close();
assert_equals(frames_processed, frames_to_encode);
assert_equals(errors, 0);
}
promise_test(encode_test.bind(null, "vp09.00.10.08", "allow"),
"encoding vp9 profile0");
promise_test(encode_test.bind(null, "vp09.02.10.10", "allow"),
"encoding vp9 profile2");
promise_test(encode_decode_test.bind(null, "vp09.02.10.10", "allow"),
"encoding and decoding vp9 profile2");
promise_test(encode_test.bind(null, "vp8", "allow"),
"encoding vp8");
promise_test(encode_decode_test.bind(null, "vp8", "allow"),
"encoding and decoding vp8");
promise_test(encode_decode_test.bind(null, "avc1.42001E", "allow"),
"encoding and decoding avc1.42001E");
/* Uncomment this for manual testing, before we have GPU tests for that */
// promise_test(encode_test.bind(null, "avc1.42001E", "require"),
// "encoding avc1.42001E");
// promise_test(encode_decode_test.bind(null, "avc1.42001E", "require"),
// "encoding and decoding avc1.42001E req");
// META: global=window,dedicatedworker
async function getImageAsBitmap(width, height) {
const src = "pattern.png";
var size = {
resizeWidth: width,
resizeHeight: height
};
return fetch(src)
.then(response => response.blob())
.then(blob => createImageBitmap(blob, size));
}
async function generateBitmap(width, height, text) {
let img = await getImageAsBitmap(width, height);
let cnv = new OffscreenCanvas(width, height);
var ctx = cnv.getContext('2d');
ctx.drawImage(img, 0, 0, width, height);
img.close();
ctx.font = '30px fantasy';
ctx.fillText(text, 5, 40);
return createImageBitmap(cnv);
}
async function createFrame(width, height, ts) {
let imageBitmap = await generateBitmap(width, height, ts.toString());
let frame = new VideoFrame(imageBitmap, { timestamp: ts });
imageBitmap.close();
return frame;
}
function delay(time_ms) {
return new Promise((resolve, reject) => {
setTimeout(resolve, time_ms);
});
};
async function change_encoding_params_test(codec, acc) {
let original_w = 800;
let original_h = 600;
let original_bitrate = 5_000_000;
let new_w = 640;
let new_h = 480;
let new_bitrate = 3_000_000;
let next_ts = 0
let reconf_ts = 0;
let frames_to_encode = 16;
let before_reconf_frames = 0;
let after_reconf_frames = 0;
let errors = 0;
let process_video_chunk = function (chunk, config) {
var data = new Uint8Array(chunk.data);
assert_greater_than_equal(data.length, 0);
let after_reconf = (reconf_ts != 0) && (chunk.timestamp >= reconf_ts);
if (after_reconf) {
after_reconf_frames++;
if (config) {
assert_equals(config.codedWidth, new_w);
assert_equals(config.codedHeight, new_h);
}
} else {
before_reconf_frames++;
if (config) {
assert_equals(config.codedWidth, original_w);
assert_equals(config.codedHeight, original_h);
}
}
};
const init = {
output: process_video_chunk,
error: (e) => {
errors++;
console.log(e.message);
},
};
const params = {
codec: codec,
acceleration: acc,
width: original_w,
height: original_h,
bitrate: original_bitrate,
framerate: 30,
};
let encoder = new VideoEncoder(init);
encoder.configure(params);
// Encode |frames_to_encode| frames with original settings
for (let i = 0; i < frames_to_encode; i++) {
var frame = await createFrame(original_w, original_h, next_ts++);
encoder.encode(frame, {});
await delay(1);
}
params.width = new_w;
params.height = new_h;
params.bitrate = new_bitrate;
// Reconfigure encoder and run |frames_to_encode| frames with new settings
encoder.configure(params);
reconf_ts = next_ts;
for (let i = 0; i < frames_to_encode; i++) {
var frame = await createFrame(new_w, new_h, next_ts++);
encoder.encode(frame, {});
await delay(1);
}
await encoder.flush();
// Configure back to original config
params.width = original_w;
params.height = original_h;
params.bitrate = original_bitrate;
encoder.configure(params);
await encoder.flush();
encoder.close();
assert_equals(before_reconf_frames, frames_to_encode);
assert_equals(after_reconf_frames, frames_to_encode);
assert_equals(errors, 0);
}
promise_test(change_encoding_params_test.bind(null, "vp8", "allow"),
"reconfiguring vp8");
promise_test(change_encoding_params_test.bind(null, "vp09.00.10.08", "allow"),
"reconfiguring vp9 profile0");
promise_test(change_encoding_params_test.bind(null, "vp09.02.10.10", "allow"),
"reconfiguring vp9 profile2");
promise_test(change_encoding_params_test.bind(null, "avc1.42001E", "allow"),
"reconfiguring avc1.42001E");
/* Uncomment this for manual testing, before we have GPU tests for that */
//promise_test(change_encoding_params_test.bind(null, "avc1.42001E", "require"),
// "reconfiguring avc1.42001E hw");