Commit 4f28a8f0 authored by Bill Orr, committed by Commit Bot

Add browser test for WebXR that validates textures use the expected pose

The page encodes a frame id into the clear color, clears the texture, then submits it.

The test then reads the frame id back out of the submitted texture and validates that the pose
used to render that frame was the expected one.
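For reference, the id is packed base-256 across the RGB channels, so ids below 2^24 round-trip
exactly; the page divides each channel by 255 because GL clear colors live in [0.0, 1.0] while
channel values live in [0, 255]. A minimal sketch of the round trip (helper names here are
illustrative, not part of the change):

  // Encode: split the frame id base-256 across r/g/b (ids < 1 << 24).
  device::Color EncodeFrameId(unsigned int frame_id) {
    device::Color c = {};
    c.r = static_cast<unsigned char>(frame_id % 256);
    c.g = static_cast<unsigned char>((frame_id / 256) % 256);
    c.b = static_cast<unsigned char>((frame_id / (256 * 256)) % 256);
    c.a = 255;
    return c;
  }

  // Decode: the inverse, matching ParseColorFrameId() in the new test below.
  unsigned int DecodeFrameId(device::Color c) {
    return static_cast<unsigned int>(c.r) + 256 * c.g + 256 * 256 * c.b;
  }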

The test found an issue where frames would be submitted after getting the next frame's pose,
causing extra latency and worse reprojection. This is fixed as part of the change so that the
test passes.
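Concretely, the fix makes the render loops defer GetFrameData while a frame is still
outstanding, so a new pose is only handed out once the previous frame has been submitted or
dropped. A minimal sketch of the pattern the OculusRenderLoop and OpenVRRenderLoop hunks below
introduce (RenderLoop stands in for either class; assumes the loop's single-threaded task
environment):

  void RenderLoop::GetFrameData(GetFrameDataCallback callback) {
    if (has_outstanding_frame_) {
      // Park the request; ClearPendingFrame() re-issues it after the in-flight
      // frame is submitted, reported missing, or presentation exits.
      DCHECK(!delayed_get_frame_data_callback_);
      delayed_get_frame_data_callback_ = base::BindOnce(
          &RenderLoop::GetFrameData, base::Unretained(this),
          std::move(callback));
      return;
    }
    has_outstanding_frame_ = true;
    // ... build frame data with a fresh pose and run |callback| ...
  }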

BUG: 854309, 850165, 801034
Cq-Include-Trybots: luci.chromium.try:android_optional_gpu_tests_rel;luci.chromium.try:linux_optional_gpu_tests_rel;luci.chromium.try:linux_vr;luci.chromium.try:mac_optional_gpu_tests_rel;luci.chromium.try:win_optional_gpu_tests_rel
Change-Id: I0f891e9929d4a0427c74ae137ed2a9c31d7744cd
Reviewed-on: https://chromium-review.googlesource.com/1123228
Commit-Queue: Bill Orr <billorr@chromium.org>
Reviewed-by: Brian Sheedy <bsheedy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#572130}
parent aefab4cc
@@ -13,3 +13,32 @@ MockOpenVRBase::~MockOpenVRBase() {
 }
 
 void MockOpenVRBase::OnFrameSubmitted(device::SubmittedFrameData frame_data) {}
+
+device::DeviceConfig MockOpenVRBase::WaitGetDeviceConfig() {
+  device::DeviceConfig ret = {0.1f /* ipd */,
+                              {1, 1, 1, 1} /* raw projection left */,
+                              {1, 1, 1, 1} /* raw projection right */};
+  return ret;
+}
+
+device::PoseFrameData MockOpenVRBase::WaitGetPresentingPose() {
+  device::PoseFrameData pose = {};
+  pose.is_valid = true;
+  // Identity matrix.
+  pose.device_to_origin[0] = 1;
+  pose.device_to_origin[5] = 1;
+  pose.device_to_origin[10] = 1;
+  pose.device_to_origin[15] = 1;
+  return pose;
+}
+
+device::PoseFrameData MockOpenVRBase::WaitGetMagicWindowPose() {
+  device::PoseFrameData pose = {};
+  pose.is_valid = true;
+  // Identity matrix.
+  pose.device_to_origin[0] = 1;
+  pose.device_to_origin[5] = 1;
+  pose.device_to_origin[10] = 1;
+  pose.device_to_origin[15] = 1;
+  return pose;
+}
@@ -15,6 +15,9 @@ class MockOpenVRBase : public device::OpenVRTestHook {
   // OpenVRTestHook
   void OnFrameSubmitted(device::SubmittedFrameData frame_data) override;
+  device::DeviceConfig WaitGetDeviceConfig() override;
+  device::PoseFrameData WaitGetPresentingPose() override;
+  device::PoseFrameData WaitGetMagicWindowPose() override;
 };
 
 #endif  // CHROME_BROWSER_VR_TEST_MOCK_OPENVR_DEVICE_HOOK_BASE_H_
@@ -207,6 +207,7 @@ bool VrXrBrowserTestBase::RunJavaScriptAndExtractBoolOrFail(
     const std::string& js_expression,
     content::WebContents* web_contents) {
   bool result;
+  DLOG(ERROR) << "Run javascript: " << js_expression;
   EXPECT_TRUE(content::ExecuteScriptAndExtractBool(
       web_contents,
       "window.domAutomationController.send(" + js_expression + ")", &result));
...
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <memory>

#include "base/environment.h"
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "chrome/browser/vr/test/mock_openvr_device_hook_base.h"
#include "chrome/browser/vr/test/vr_browser_test.h"
#include "chrome/browser/vr/test/vr_xr_browser_test.h"
#include "chrome/browser/vr/test/xr_browser_test.h"

namespace vr {

namespace {

struct Frame {
  device::SubmittedFrameData submitted;
  device::PoseFrameData pose;
  device::DeviceConfig config;
};
class MyOpenVRMock : public MockOpenVRBase {
 public:
  void OnFrameSubmitted(device::SubmittedFrameData frame_data) final;
  device::DeviceConfig WaitGetDeviceConfig() final {
    device::DeviceConfig ret = {
        0.2f /* ipd */,
        {0.1f, 0.2f, 0.3f, 0.4f} /* left projection raw */,
        {0.5f, 0.6f, 0.7f, 0.8f} /* right projection raw */};
    return ret;
  }
  device::PoseFrameData WaitGetPresentingPose() final;
  device::PoseFrameData WaitGetMagicWindowPose() final;

  // Waits until |count| frames have been submitted before returning.
  void WaitForFrames(int count) {
    DCHECK(!wait_loop_);
    wait_frame_count_ = count;
    base::RunLoop* wait_loop =
        new base::RunLoop(base::RunLoop::Type::kNestableTasksAllowed);
    wait_loop_ = wait_loop;
    wait_loop->Run();
    delete wait_loop;
  }

  std::vector<Frame> submitted_frames;
  device::PoseFrameData last_exclusive_frame_data = {};

 private:
  // Set to null on the background thread after calling Quit(), so we can
  // ensure we only call Quit() once.
  base::RunLoop* wait_loop_ = nullptr;
  int wait_frame_count_ = 0;
  int num_frames_submitted_ = 0;
  bool has_last_exclusive_frame_data_ = false;
  int frame_id_ = 0;
};
unsigned int ParseColorFrameId(device::Color color) {
  // Corresponding math in test_webxr_poses.html.
  unsigned int frame_id =
      static_cast<unsigned int>(color.r) + 256 * color.g + 256 * 256 * color.b;
  return frame_id;
}

void MyOpenVRMock::OnFrameSubmitted(device::SubmittedFrameData frame_data) {
  unsigned int frame_id = ParseColorFrameId(frame_data.color);
  DLOG(ERROR) << "Frame Submitted: " << num_frames_submitted_ << " "
              << frame_id;
  submitted_frames.push_back(
      {frame_data, last_exclusive_frame_data, WaitGetDeviceConfig()});
  num_frames_submitted_++;
  if (num_frames_submitted_ >= wait_frame_count_ && wait_frame_count_ > 0 &&
      wait_loop_) {
    wait_loop_->Quit();
    wait_loop_ = nullptr;
  }
  EXPECT_TRUE(has_last_exclusive_frame_data_);
  // We expect a WaitGetPoses, then two submits (one for each eye), so after
  // two submitted frames don't use the same frame data again.
  if (num_frames_submitted_ % 2 == 0)
    has_last_exclusive_frame_data_ = false;
}
device::PoseFrameData MyOpenVRMock::WaitGetMagicWindowPose() {
  device::PoseFrameData pose = {};
  pose.is_valid = true;
  // Almost the identity matrix - different enough that we can identify if
  // magic-window poses are used instead of presenting poses.
  pose.device_to_origin[0] = 1;
  pose.device_to_origin[5] = -1;
  pose.device_to_origin[10] = 1;
  pose.device_to_origin[15] = 1;
  return pose;
}

device::PoseFrameData MyOpenVRMock::WaitGetPresentingPose() {
  DLOG(ERROR) << "WaitGetPresentingPose: " << frame_id_;
  device::PoseFrameData pose = {};
  pose.is_valid = true;
  // Start with the identity matrix.
  pose.device_to_origin[0] = 1;
  pose.device_to_origin[5] = 1;
  pose.device_to_origin[10] = 1;
  pose.device_to_origin[15] = 1;
  // Add a translation so each frame gets a different transform, and so it's
  // easy to identify what the expected pose is.
  pose.device_to_origin[3] = frame_id_;
  has_last_exclusive_frame_data_ = true;
  frame_id_++;
  last_exclusive_frame_data = pose;
  return pose;
}
std::string GetMatrixAsString(const float m[]) {
  // Dump the transpose of the matrix due to OpenVR vs. WebXR matrix format
  // differences.
  return base::StringPrintf(
      "[%f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f]", m[0],
      m[4], m[8], m[12], m[1], m[5], m[9], m[13], m[2], m[6], m[10], m[14],
      m[3], m[7], m[11], m[15]);
}

std::string GetPoseAsString(const Frame& frame) {
  return GetMatrixAsString(frame.pose.device_to_origin);
}

}  // namespace
// Pixel test for WebVR/WebXR - start presentation, submit frames, get data
// back out. Validates that submitted frames used the expected poses.
void TestPresentationPosesImpl(VrXrBrowserTestBase* t, std::string filename) {
  MyOpenVRMock my_mock;

  // Load the test page and enter presentation.
  t->LoadUrlAndAwaitInitialization(t->GetHtmlTestFile(filename));
  t->EnterPresentationOrFail(t->GetFirstTabWebContents());

  // Wait for JavaScript to submit at least one frame.
  EXPECT_TRUE(t->PollJavaScriptBoolean(
      "hasPresentedFrame", t->kPollTimeoutShort, t->GetFirstTabWebContents()))
      << "No frame submitted";

  // Render at least 20 frames. Make sure each has the right submitted pose.
  my_mock.WaitForFrames(20);

  // Exit presentation.
  t->ExitPresentationOrFail(t->GetFirstTabWebContents());

  // Stop hooking OpenVR, so we can safely analyze our cached data without
  // incoming calls (there may be leftover mojo messages queued).
  device::OpenVRDeviceProvider::SetTestHook(nullptr);

  // Analyze the submitted frames - check for a few things:
  // 1. Each frame id should be submitted at most once for each of the left
  //    and right eyes.
  // 2. The pose that WebXR used for rendering the submitted frame should be
  //    the one that we expected.
  std::set<unsigned int> seen_left;
  std::set<unsigned int> seen_right;
  unsigned int max_frame_id = 0;
  for (auto frame : my_mock.submitted_frames) {
    const device::SubmittedFrameData& data = frame.submitted;

    // The test page encodes the frame id as the clear color.
    unsigned int frame_id = ParseColorFrameId(data.color);

    // Validate that each frame is only seen once for each eye.
    DLOG(ERROR) << "Frame id: " << frame_id;
    if (data.left_eye) {
      EXPECT_TRUE(seen_left.find(frame_id) == seen_left.end());
      seen_left.insert(frame_id);
    } else {
      EXPECT_TRUE(seen_right.find(frame_id) == seen_right.end());
      seen_right.insert(frame_id);
    }

    // Validate that frames arrive in order.
    EXPECT_TRUE(frame_id >= max_frame_id);
    max_frame_id = std::max(frame_id, max_frame_id);

    // Validate that the JavaScript-side cache of frames contains our
    // submitted frame.
    EXPECT_TRUE(t->RunJavaScriptAndExtractBoolOrFail(
        base::StringPrintf("checkFrameOccurred(%d)", frame_id),
        t->GetFirstTabWebContents()));

    // Validate that the JavaScript-side cache of frames has the correct pose.
    EXPECT_TRUE(t->RunJavaScriptAndExtractBoolOrFail(
        base::StringPrintf("checkFramePose(%d, %s)", frame_id,
                           GetPoseAsString(frame).c_str()),
        t->GetFirstTabWebContents()));
  }

  // Tell JavaScript that it is done with the test.
  t->ExecuteStepAndWait("finishTest()", t->GetFirstTabWebContents());
  t->EndTest(t->GetFirstTabWebContents());
}

IN_PROC_BROWSER_TEST_F(XrBrowserTestStandard,
                       REQUIRES_GPU(TestPresentationPoses)) {
  TestPresentationPosesImpl(this, "test_webxr_poses");
}

}  // namespace vr
@@ -1954,6 +1954,7 @@ test("browser_tests") {
     "../browser/vr/test/vr_xr_browser_test.h",
     "../browser/vr/test/xr_browser_test.cc",
     "../browser/vr/test/xr_browser_test.h",
+    "../browser/vr/webvr_frame_pose_browser_test.cc",
     "../browser/vr/webvr_input_browser_test.cc",
     "../browser/vr/webvr_pixel_browser_test.cc",
     "../browser/vr/webvr_tab_browser_test.cc",
...
<!doctype html>
<!--
Tests that WebXR poses are correct through the pipeline.

We encode the frame id in the canvas/image and cache the pose information. The
test can query whether each submitted frame used the correct pose.
-->
<html>
<head>
  <link rel="stylesheet" type="text/css" href="../resources/webxr_e2e.css">
</head>
<body>
  <canvas id="webgl-canvas"></canvas>
  <script src="../../../../../../third_party/WebKit/LayoutTests/resources/testharness.js"></script>
  <script src="../resources/webxr_e2e.js"></script>
  <script src="../resources/webxr_boilerplate.js"></script>
  <script>
    var t = async_test("Pose data is correct");
    var frame_id = 0;
    var frame_data_array = {};

    function FloatCompare(a, b) {
      return Math.abs(a - b) < 0.001;
    }

    function MatrixCompare(a, b) {
      for (var i = 0; i < 16; ++i) {
        if (!FloatCompare(a[i], b[i])) return false;
      }
      return true;
    }

    function checkFrameOccurred(frame_id) {
      return frame_id in frame_data_array;
    }

    function checkFrameProjection(frame_id, eye, expected) {
      return MatrixCompare(
          frame_data_array[frame_id].views[eye].projectionMatrix, expected);
    }

    function checkFrameView(frame_id, eye, expected) {
      let frame_data = frame_data_array[frame_id];
      let pose = frame_data.getDevicePose(exclusiveFrameOfRef);
      return MatrixCompare(
          pose.getViewMatrix(frame_data_array[frame_id].views[eye]), expected);
    }

    function checkFramePose(frame_id, expected) {
      let frame_data = frame_data_array[frame_id];
      let pose = frame_data.getDevicePose(exclusiveFrameOfRef);
      if (!pose) {
        // We can intermittently get null poses. For now, treat them as
        // passing, even though this should be fixed.
        // TODO(https://crbug.com/859700): Make it so we don't get null poses
        // unexpectedly.
        console.log("null pose - unexpected, but pass");
        return true;
      }
      console.log("checkFramePose: " + pose.poseModelMatrix + "\n" + expected);
      return MatrixCompare(pose.poseModelMatrix, expected);
    }

    onExclusiveXRFrameCallback = function(session, frame, gl) {
      // Encode an index into the clear color.
      frame_id++;
      frame_data_array[frame_id] = frame;
      var encoded_frame_id = {};
      encoded_frame_id.r = frame_id % 256;
      encoded_frame_id.g = ((frame_id - frame_id % 256) / 256) % 256;
      encoded_frame_id.b =
          ((frame_id - frame_id % (256 * 256)) / (256 * 256)) % 256;
      // We divide by 255 rather than 256, because our range of values is
      // [0, 255], which should map to [0.0, 1.0].
      gl.clearColor(encoded_frame_id.r / 255, encoded_frame_id.g / 255,
                    encoded_frame_id.b / 255, 1.0);
      gl.clear(gl.COLOR_BUFFER_BIT);
      console.log("Submitting frame: " + frame_id + " " + encoded_frame_id.r);
    }

    function finishTest() {
      t.done();
    }
  </script>
</body>
</html>
@@ -53,9 +53,6 @@ function onAnimationFrame(t) {
   vrDisplay.requestAnimationFrame(onAnimationFrame);
   // If presenting, set canvas to blue. Otherwise, red.
   if (vrDisplay.isPresenting) {
-    if (onPresentingAnimationFrameCallback) {
-      onPresentingAnimationFrameCallback();
-    }
     vrDisplay.getFrameData(frameData);
     gl.clearColor(0.0, 0.0, 1.0, 1.0);
@@ -65,6 +62,10 @@ function onAnimationFrame(t) {
     gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5,
                 webglCanvas.height);
+    if (onPresentingAnimationFrameCallback) {
+      onPresentingAnimationFrameCallback(frameData, gl);
+    }
     if (shouldSubmitFrame) {
       vrDisplay.submitFrame();
       hasPresentedFrame = true;
...
@@ -89,10 +89,10 @@ function onXRFrame(t, frame) {
   // If in an exclusive session, set canvas to blue. Otherwise, red.
   if (session.exclusive) {
+    gl.clearColor(0.0, 0.0, 1.0, 1.0);
     if (onExclusiveXRFrameCallback) {
-      onExclusiveXRFrameCallback(session, frame);
+      onExclusiveXRFrameCallback(session, frame, gl);
     }
-    gl.clearColor(0.0, 0.0, 1.0, 1.0);
   } else {
     if (onMagicWindowXRFrameCallback) {
       onMagicWindowXRFrameCallback(session, frame);
...
@@ -55,6 +55,13 @@ OculusRenderLoop::~OculusRenderLoop() {
   Stop();
 }
 
+void OculusRenderLoop::ClearPendingFrame() {
+  has_outstanding_frame_ = false;
+  if (delayed_get_frame_data_callback_) {
+    base::ResetAndReturn(&delayed_get_frame_data_callback_).Run();
+  }
+}
+
 void OculusRenderLoop::CleanUp() {
   submit_client_ = nullptr;
   binding_.Close();
@@ -64,6 +71,7 @@ void OculusRenderLoop::SubmitFrameMissing(int16_t frame_index,
                                           const gpu::SyncToken& sync_token) {
   // Nothing to do. It's OK to start the next frame even if the current
   // one didn't get sent to the ovrSession.
+  ClearPendingFrame();
 }
 
 void OculusRenderLoop::SubmitFrame(int16_t frame_index,
@@ -93,8 +101,10 @@ void OculusRenderLoop::SubmitFrameWithTextureHandle(
   platform_handle.struct_size = sizeof(platform_handle);
   MojoResult result = MojoUnwrapPlatformHandle(texture_handle.release().value(),
                                                nullptr, &platform_handle);
-  if (result != MOJO_RESULT_OK)
+  if (result != MOJO_RESULT_OK) {
+    ClearPendingFrame();
     return;
+  }
 
   texture_helper_.SetSourceTexture(
       base::win::ScopedHandle(reinterpret_cast<HANDLE>(platform_handle.value)));
@@ -177,6 +187,7 @@ void OculusRenderLoop::SubmitFrameWithTextureHandle(
   submit_client_->OnSubmitFrameTransferred(copy_succeeded);
   submit_client_->OnSubmitFrameRendered();
 #endif
+  ClearPendingFrame();
 }
 
 void OculusRenderLoop::UpdateLayerBounds(int16_t frame_id,
@@ -229,6 +240,7 @@ void OculusRenderLoop::ExitPresent() {
   is_presenting_ = false;
   binding_.Close();
   submit_client_ = nullptr;
+  ClearPendingFrame();
 }
 
 void OculusRenderLoop::Init() {}
@@ -241,6 +253,15 @@ void OculusRenderLoop::GetFrameData(
     mojom::VRPresentationProvider::GetFrameDataCallback callback) {
   DCHECK(is_presenting_);
 
+  if (has_outstanding_frame_) {
+    DCHECK(!delayed_get_frame_data_callback_);
+    delayed_get_frame_data_callback_ =
+        base::BindOnce(&OculusRenderLoop::GetFrameData, base::Unretained(this),
+                       std::move(callback));
+    return;
+  }
+
+  has_outstanding_frame_ = true;
+
   mojom::XRFrameDataPtr frame_data = mojom::XRFrameData::New();
   frame_data->frame_id = next_frame_id_;
...
@@ -61,6 +61,8 @@ class OculusRenderLoop : public base::Thread, mojom::VRPresentationProvider {
   void Init() override;
   void CleanUp() override;
 
+  void ClearPendingFrame();
+
   mojom::VRPosePtr GetPose();
   std::vector<mojom::XRInputSourceStatePtr> GetInputState(
@@ -76,6 +78,9 @@ class OculusRenderLoop : public base::Thread, mojom::VRPresentationProvider {
   D3D11TextureHelper texture_helper_;
 #endif
 
+  base::OnceCallback<void()> delayed_get_frame_data_callback_;
+  bool has_outstanding_frame_ = false;
+
   long long ovr_frame_index_ = 0;
   int16_t next_frame_id_ = 0;
   bool is_presenting_ = false;
...
@@ -13,12 +13,6 @@
 
 #include "base/command_line.h"
 
-namespace {
-// Use the pattern established in content_switches.h, but don't add a content
-// dependency -- device shouldn't have one.
-constexpr char kTestType[] = "test-type";
-}  // namespace
-
 namespace device {
 
 void OpenVRDeviceProvider::RecordRuntimeAvailability() {
@@ -43,7 +37,6 @@ OpenVRDeviceProvider::~OpenVRDeviceProvider() {
   }
 
   if (test_hook_registration_s) {
-    DCHECK(base::CommandLine::ForCurrentProcess()->HasSwitch(kTestType));
     test_hook_registration_s->SetTestHook(nullptr);
   }
@@ -66,7 +59,6 @@ void OpenVRDeviceProvider::Initialize(
 }
 
 void OpenVRDeviceProvider::SetTestHook(OpenVRTestHook* test_hook) {
-  DCHECK(base::CommandLine::ForCurrentProcess()->HasSwitch(kTestType));
   test_hook_s = test_hook;
   if (test_hook_registration_s) {
     test_hook_registration_s->SetTestHook(test_hook_s);
@@ -84,8 +76,13 @@ void OpenVRDeviceProvider::CreateDevice() {
   vr::IVRSystem* vr_system =
       vr::VR_Init(&init_error, vr::EVRApplicationType::VRApplication_Scene);
 
-  if (base::CommandLine::ForCurrentProcess()->HasSwitch(kTestType)) {
+  if (test_hook_s) {
     // Allow our mock implementation of OpenVR to be controlled by tests.
+    // Note that SetTestHook must be called before CreateDevice, or
+    // test_hook_registration_s will remain null. This is a good pattern for
+    // tests anyway, since the alternative is to start mocking partway through
+    // using the device, which leads to races over when we started controlling
+    // things.
     vr::EVRInitError eError;
     test_hook_registration_s = reinterpret_cast<TestHookRegistration*>(
         vr::VR_GetGenericInterface(kChromeOpenVRTestHookAPI, &eError));
...
@@ -55,10 +55,18 @@ OpenVRRenderLoop::~OpenVRRenderLoop() {
   Stop();
 }
 
+void OpenVRRenderLoop::ClearPendingFrame() {
+  has_outstanding_frame_ = false;
+  if (delayed_get_frame_data_callback_) {
+    base::ResetAndReturn(&delayed_get_frame_data_callback_).Run();
+  }
+}
+
 void OpenVRRenderLoop::SubmitFrameMissing(int16_t frame_index,
                                           const gpu::SyncToken& sync_token) {
   // Nothing to do. It's OK to start the next frame even if the current
   // one didn't get sent to OpenVR.
+  ClearPendingFrame();
 }
 
 void OpenVRRenderLoop::SubmitFrame(int16_t frame_index,
@@ -80,14 +88,15 @@ void OpenVRRenderLoop::SubmitFrameWithTextureHandle(
     mojo::ScopedHandle texture_handle) {
   TRACE_EVENT1("gpu", "SubmitFrameWithTextureHandle", "frameIndex",
                frame_index);
 #if defined(OS_WIN)
   MojoPlatformHandle platform_handle;
   platform_handle.struct_size = sizeof(platform_handle);
   MojoResult result = MojoUnwrapPlatformHandle(texture_handle.release().value(),
                                                nullptr, &platform_handle);
-  if (result != MOJO_RESULT_OK)
+  if (result != MOJO_RESULT_OK) {
+    ClearPendingFrame();
     return;
+  }
 
   texture_helper_.SetSourceTexture(
       base::win::ScopedHandle(reinterpret_cast<HANDLE>(platform_handle.value)));
@@ -125,6 +134,8 @@ void OpenVRRenderLoop::SubmitFrameWithTextureHandle(
   submit_client_->OnSubmitFrameTransferred(copy_successful);
   submit_client_->OnSubmitFrameRendered();
 #endif
+
+  ClearPendingFrame();
 }
 
 void OpenVRRenderLoop::CleanUp() {
@@ -195,6 +206,7 @@ void OpenVRRenderLoop::ExitPresent() {
   binding_.Close();
   submit_client_ = nullptr;
   vr_compositor_->SuspendRendering(true);
+  ClearPendingFrame();
 }
 
 mojom::VRPosePtr OpenVRRenderLoop::GetPose() {
@@ -234,6 +246,17 @@ base::WeakPtr<OpenVRRenderLoop> OpenVRRenderLoop::GetWeakPtr() {
 
 void OpenVRRenderLoop::GetFrameData(
     mojom::VRPresentationProvider::GetFrameDataCallback callback) {
   DCHECK(is_presenting_);
 
+  if (has_outstanding_frame_) {
+    DCHECK(!delayed_get_frame_data_callback_);
+    delayed_get_frame_data_callback_ =
+        base::BindOnce(&OpenVRRenderLoop::GetFrameData, base::Unretained(this),
+                       std::move(callback));
+    return;
+  }
+
+  has_outstanding_frame_ = true;
+
   mojom::XRFrameDataPtr frame_data = mojom::XRFrameData::New();
   frame_data->frame_id = next_frame_id_;
...
@@ -60,6 +60,8 @@ class OpenVRRenderLoop : public base::Thread, mojom::VRPresentationProvider {
   void Init() override;
   void CleanUp() override;
 
+  void ClearPendingFrame();
+
   mojom::VRPosePtr GetPose();
   std::vector<mojom::XRInputSourceStatePtr> GetInputState(
       vr::TrackedDevicePose_t* poses,
@@ -76,6 +78,9 @@ class OpenVRRenderLoop : public base::Thread, mojom::VRPresentationProvider {
   D3D11TextureHelper texture_helper_;
 #endif
 
+  base::OnceCallback<void()> delayed_get_frame_data_callback_;
+  bool has_outstanding_frame_ = false;
+
   int16_t next_frame_id_ = 0;
   bool is_presenting_ = false;
   InputActiveState input_active_states_[vr::k_unMaxTrackedDeviceCount];
...
@@ -470,10 +470,11 @@ void TestVRSystem::GetProjectionRaw(EVREye eEye,
                                     float* pfRight,
                                     float* pfTop,
                                     float* pfBottom) {
-  *pfLeft = 1;
-  *pfRight = 1;
-  *pfTop = 1;
-  *pfBottom = 1;
+  auto proj = g_test_helper.GetProjectionRaw(eEye == EVREye::Eye_Left);
+  *pfLeft = proj.projection[0];
+  *pfRight = proj.projection[1];
+  *pfTop = proj.projection[2];
+  *pfBottom = proj.projection[3];
 }
 
 HmdMatrix34_t TestVRSystem::GetEyeToHeadTransform(EVREye eEye) {
@@ -491,19 +492,8 @@ void TestVRSystem::GetDeviceToAbsoluteTrackingPose(
     VR_ARRAY_COUNT(unTrackedDevicePoseArrayCount)
         TrackedDevicePose_t* pTrackedDevicePoseArray,
     uint32_t unTrackedDevicePoseArrayCount) {
-  TrackedDevicePose_t pose = {};
-  pose.mDeviceToAbsoluteTracking.m[0][0] = 1;
-  pose.mDeviceToAbsoluteTracking.m[1][1] = 1;
-  pose.mDeviceToAbsoluteTracking.m[2][2] = 1;
-  pose.mDeviceToAbsoluteTracking.m[0][2] = 5;
-  pose.vVelocity = {0, 0, 0};
-  pose.vAngularVelocity = {0, 0, 0};
-  pose.eTrackingResult = TrackingResult_Running_OK;
-  pose.bPoseIsValid = true;
-  pose.bDeviceIsConnected = true;
+  TrackedDevicePose_t pose = g_test_helper.GetPose(false /* presenting pose */);
   pTrackedDevicePoseArray[0] = pose;
   for (unsigned int i = 1; i < unTrackedDevicePoseArrayCount; ++i) {
     TrackedDevicePose_t pose = {};
     pTrackedDevicePoseArray[i] = pose;
@@ -536,7 +526,7 @@ float TestVRSystem::GetFloatTrackedDeviceProperty(
   }
   switch (prop) {
     case Prop_UserIpdMeters_Float:
-      return 0.06f;
+      return g_test_helper.GetIpd();
     default:
       NOTIMPLEMENTED();
   }
@@ -560,23 +550,24 @@ EVRCompositorError TestVRCompositor::WaitGetPoses(TrackedDevicePose_t* poses1,
                                                   unsigned int count1,
                                                   TrackedDevicePose_t* poses2,
                                                   unsigned int count2) {
-  if (poses1)
-    g_system.GetDeviceToAbsoluteTrackingPose(TrackingUniverseSeated, 0, poses1,
-                                             count1);
-
-  if (poses2)
-    g_system.GetDeviceToAbsoluteTrackingPose(TrackingUniverseSeated, 0, poses2,
-                                             count2);
+  TrackedDevicePose_t pose = g_test_helper.GetPose(true /* presenting pose */);
+  for (unsigned int i = 0; i < count1; ++i) {
+    poses1[i] = pose;
+  }
+  for (unsigned int i = 0; i < count2; ++i) {
+    poses2[i] = pose;
+  }
 
   return VRCompositorError_None;
 }
 
-EVRCompositorError TestVRCompositor::Submit(EVREye,
+EVRCompositorError TestVRCompositor::Submit(EVREye eye,
                                             Texture_t const* texture,
-                                            VRTextureBounds_t const*,
+                                            VRTextureBounds_t const* bounds,
                                             EVRSubmitFlags) {
   g_test_helper.OnPresentedFrame(
-      reinterpret_cast<ID3D11Texture2D*>(texture->handle));
+      reinterpret_cast<ID3D11Texture2D*>(texture->handle), bounds, eye);
 
   return VRCompositorError_None;
 }
...
@@ -21,7 +21,9 @@ void TestHelper::TestFailure() {
   NOTREACHED();
 }
 
-void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture) {
+void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture,
+                                  const VRTextureBounds_t* bounds,
+                                  EVREye eye) {
   // Early-out if there is nobody listening.
   bool is_hooked = false;
   lock_.Acquire();
@@ -32,6 +34,11 @@ void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture) {
   if (!is_hooked)
     return;
 
+  device::SubmittedFrameData frame_data = {};
+  frame_data.left_eye = (eye == Eye_Left);
+  frame_data.viewport = {bounds->uMin, bounds->uMax, bounds->vMin,
+                         bounds->vMax};
+
   Microsoft::WRL::ComPtr<ID3D11Device> device;
   texture->GetDevice(&device);
@@ -43,8 +50,15 @@ void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture) {
   Microsoft::WRL::ComPtr<ID3D11Texture2D> texture_copy;
   D3D11_TEXTURE2D_DESC desc;
   texture->GetDesc(&desc);
+  frame_data.image_width = desc.Width;
+  frame_data.image_height = desc.Height;
+
+  size_t buffer_size = sizeof(device::SubmittedFrameData::raw_buffer);
+  size_t buffer_size_pixels = buffer_size / sizeof(device::Color);
+
   desc.Width = 1;
-  desc.Height = 1;
+  desc.Height = buffer_size_pixels;
   desc.MiscFlags = 0;
   desc.BindFlags = 0;
   desc.Usage = D3D11_USAGE_STAGING;
@@ -55,7 +69,7 @@ void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture) {
     return;
   }
 
-  D3D11_BOX box = {0, 0, 0, 1, 1, 1};  // a 1-pixel box
+  D3D11_BOX box = {0, 0, 0, buffer_size_pixels, 1, 1};
   context->CopySubresourceRegion(texture_copy.Get(), 0, 0, 0, 0, texture, 0,
                                  &box);
@@ -68,15 +82,83 @@ void TestHelper::OnPresentedFrame(ID3D11Texture2D* texture) {
   // We have a 1-pixel image. Give it to the test hook.
   device::Color* color = reinterpret_cast<device::Color*>(map_data.pData);
+  frame_data.color = color[0];
+  memcpy(&frame_data.raw_buffer, map_data.pData, buffer_size);
 
   lock_.Acquire();
   if (test_hook_)
-    test_hook_->OnFrameSubmitted(device::SubmittedFrameData{color[0]});
+    test_hook_->OnFrameSubmitted(frame_data);
   lock_.Release();
 
   context->Unmap(texture_copy.Get(), 0);
 }
 
+namespace {
+
+vr::TrackedDevicePose_t TranslatePose(device::PoseFrameData pose) {
+  vr::TrackedDevicePose_t ret = {};
+  for (int i = 0; i < 4; ++i) {
+    for (int j = 0; j < 3; ++j) {
+      ret.mDeviceToAbsoluteTracking.m[j][i] = pose.device_to_origin[j * 4 + i];
+    }
+  }
+  ret.vVelocity = {0, 0, 0};
+  ret.vAngularVelocity = {0, 0, 0};
+  ret.eTrackingResult = TrackingResult_Running_OK;
+  ret.bPoseIsValid = pose.is_valid;
+  ret.bDeviceIsConnected = true;
+  return ret;
+}
+
+}  // namespace
+
+float TestHelper::GetIpd() {
+  lock_.Acquire();
+  if (test_hook_) {
+    auto config = test_hook_->WaitGetDeviceConfig();
+    lock_.Release();
+    return config.ipd;
+  }
+  lock_.Release();
+  return 0.1f;
+}
+
+ProjectionRaw TestHelper::GetProjectionRaw(bool left) {
+  lock_.Acquire();
+  if (test_hook_) {
+    auto config = test_hook_->WaitGetDeviceConfig();
+    ProjectionRaw ret = {};
+    float* projection = left ? config.viewport_left : config.viewport_right;
+    ret.projection[0] = projection[0];
+    ret.projection[1] = projection[1];
+    ret.projection[2] = projection[2];
+    ret.projection[3] = projection[3];
+    lock_.Release();
+    return ret;
+  }
+  lock_.Release();
+  return {{1, 1, 1, 1}};
+}
+
+vr::TrackedDevicePose_t TestHelper::GetPose(bool presenting) {
+  lock_.Acquire();
+  if (test_hook_) {
+    auto ret = TranslatePose(presenting ? test_hook_->WaitGetPresentingPose()
+                                        : test_hook_->WaitGetMagicWindowPose());
+    lock_.Release();
+    return ret;
+  }
+  lock_.Release();
+
+  device::PoseFrameData pose = {};
+  pose.is_valid = true;
+  pose.device_to_origin[0] = 1;
+  pose.device_to_origin[5] = 1;
+  pose.device_to_origin[10] = 1;
+  return TranslatePose(pose);
+}
+
 void TestHelper::SetTestHook(device::OpenVRTestHook* hook) {
   lock_.Acquire();
   test_hook_ = hook;
...
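A note on the matrix plumbing above: the hook's device_to_origin is a flat 16-float array in
row-major order, TranslatePose() copies rows 0-2 of it into OpenVR's 3x4 HmdMatrix34_t (the
bottom row is implicitly {0, 0, 0, 1}), and GetMatrixAsString() in the browser test prints the
transpose because WebXR reports column-major matrices. The index mapping, as a sketch (helper
names are illustrative):

  // Row-major 4x4 (test hook): element (row, col) is m[row * 4 + col].
  // Column-major 4x4 (WebXR): the same element is m[col * 4 + row].
  float RowMajorAt(const float m[16], int row, int col) {
    return m[row * 4 + col];
  }
  float ColumnMajorAt(const float m[16], int row, int col) {
    return m[col * 4 + row];
  }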
@@ -7,15 +7,25 @@
 
 #include "base/synchronization/lock.h"
 #include "device/vr/openvr/test/test_hook.h"
+#include "third_party/openvr/src/headers/openvr.h"
 
 class ID3D11Texture2D;
 
 namespace vr {
 
+struct ProjectionRaw {
+  float projection[4];
+};
+
 class TestHelper : public device::TestHookRegistration {
  public:
   // Methods called by mock OpenVR APIs.
-  void OnPresentedFrame(ID3D11Texture2D* texture);
+  void OnPresentedFrame(ID3D11Texture2D* texture,
+                        const VRTextureBounds_t* bounds,
+                        EVREye eye);
+  TrackedDevicePose_t GetPose(bool presenting);
+  float GetIpd();
+  ProjectionRaw GetProjectionRaw(bool left);
   void TestFailure();
 
   // TestHookRegistration
...
@@ -17,14 +17,40 @@ struct Color {
   unsigned char a;
 };
 
+struct Viewport {
+  float left, right, top, bottom;
+};
+
 struct SubmittedFrameData {
   Color color;
+  bool left_eye;
+  Viewport viewport;
+  unsigned int image_width;
+  unsigned int image_height;
+  char raw_buffer[256];  // Can encode raw data here.
+};
+
+struct PoseFrameData {
+  float device_to_origin[16];
+  bool is_valid;
+};
+
+struct DeviceConfig {
+  float ipd;
+  float viewport_left[4];   // raw projection left {left, right, top, bottom}
+  float viewport_right[4];  // raw projection right {left, right, top, bottom}
 };
 
 // Tests may implement this, and register it to control behavior of OpenVR.
 class OpenVRTestHook {
  public:
   virtual void OnFrameSubmitted(SubmittedFrameData frame_data) = 0;
+  virtual DeviceConfig WaitGetDeviceConfig() = 0;
+  virtual PoseFrameData WaitGetPresentingPose() = 0;
+  virtual PoseFrameData WaitGetMagicWindowPose() = 0;
 };
 
 class TestHookRegistration {
...