Commit d3e9434c authored by Klaus Weidner, committed by Commit Bot

WebXR: Use a head pose for GVR controller updates

GVR's built-in elbow model for the 3DoF controller requires a head pose to
calculate an estimated torso rotation and the elbow angles derived from that.
Passing this through needed some restructuring of the SendVSync logic to handle
asynchronous updates while avoiding duplicate pose calculations.

This also fixes the stuck-in-place controller on Lenovo Mirage, a 6DoF headset
with a 3DoF controller.

Bug: 968193
Change-Id: I14e95299a1e8134b1755a6ca14459b710f598c1c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1636830
Reviewed-by: Brian Sheedy <bsheedy@chromium.org>
Commit-Queue: Klaus Weidner <klausw@chromium.org>
Cr-Commit-Position: refs/heads/master@{#664646}
parent 88f55637
......@@ -284,7 +284,7 @@ void GvrSchedulerDelegate::OnGpuProcessConnectionReady() {
// See if we can send a VSync.
webxr_.NotifyMailboxBridgeReady();
WebXrTryStartAnimatingFrame(false);
WebXrTryStartAnimatingFrame();
}
void GvrSchedulerDelegate::CreateSurfaceBridge(
......@@ -386,14 +386,44 @@ void GvrSchedulerDelegate::OnVSync(base::TimeTicks frame_time) {
vsync_helper_.RequestVSync();
if (ShouldDrawWebVr())
browser_renderer_->ProcessControllerInputForWebXr(frame_time);
else
DrawFrame(-1, frame_time);
// The controller update logic is a bit complicated. We poll controller state
// on every VSync to ensure that the "exit VR" button stays responsive even
// for uncooperative apps. This has the side effect of filling in the
// input_states_ variable which gets attached to the frame data sent via
// the next SendVSync. However, fetching controller data needs a head pose
// to ensure the elbow model works right.
//
// If we're about to run SendVSync now, fetch a fresh head pose now and use
// that for both the controller update and for SendVSync. If not, just use
// a recent-ish head pose for the controller update, falling back to an
// identity transform if needed.
webxr_vsync_pending_ = true;
pending_time_ = frame_time;
WebXrTryStartAnimatingFrame(true);
bool can_animate = WebVrCanAnimateFrame(true);
if (ShouldDrawWebVr()) {
gfx::Transform head_mat;
device::mojom::VRPosePtr pose;
if (can_animate) {
// Get a fresh head pose.
pose = GetHeadPose(&head_mat);
} else if (webxr_.HaveAnimatingFrame()) {
// Get the most-recently-used head pose, if available. If we don't have an
// animating frame, it's OK to use the default head_mat value which is an
// identity transform.
head_mat = webxr_.GetAnimatingFrame()->head_pose;
}
browser_renderer_->ProcessControllerInputForWebXr(head_mat, frame_time);
if (can_animate)
SendVSync(std::move(pose), head_mat);
} else {
DrawFrame(-1, frame_time);
if (can_animate)
SendVSyncWithNewHeadPose();
}
}
void GvrSchedulerDelegate::DrawFrame(int16_t frame_index,
......@@ -664,7 +694,7 @@ void GvrSchedulerDelegate::DrawFrameSubmitNow(FrameType frame_type,
// See if we can animate a new WebVR frame. Intentionally using
// ShouldDrawWebVr here since we also want to run this check after
// UI frames, i.e. transitioning from transient UI to WebVR.
WebXrTryStartAnimatingFrame(false);
WebXrTryStartAnimatingFrame();
}
}
......@@ -749,9 +779,12 @@ bool GvrSchedulerDelegate::WebVrCanAnimateFrame(bool is_from_onvsync) {
return true;
}
void GvrSchedulerDelegate::WebXrTryStartAnimatingFrame(bool is_from_onvsync) {
if (WebVrCanAnimateFrame(is_from_onvsync)) {
SendVSync();
// Starts animating a new WebXR frame if all preconditions are met, sending
// the GetFrameData response with a freshly fetched head pose.
void GvrSchedulerDelegate::WebXrTryStartAnimatingFrame() {
  // This method is only used outside OnVSync, so the is_from_onvsync argument
  // to WebVrCanAnimateFrame is always false. OnVSync calls SendVSync directly
  // if needed, bypassing this method, so that it can supply a specific pose.
  if (WebVrCanAnimateFrame(false)) {
    SendVSyncWithNewHeadPose();
  }
}
......@@ -853,7 +886,28 @@ bool GvrSchedulerDelegate::WebVrHasOverstuffedBuffers() {
return false;
}
void GvrSchedulerDelegate::SendVSync() {
// Fetches a predicted head pose (with neck model applied) from the GVR API
// for the predicted frame time, writing the corresponding head transform to
// |head_mat_out|. Returns the pose in mojo form for sending to the client.
device::mojom::VRPosePtr GvrSchedulerDelegate::GetHeadPose(
    gfx::Transform* head_mat_out) {
  // GVR expects the prediction interval in nanoseconds; GetPredictedFrameTime
  // supplies it as a base::TimeDelta.
  int64_t prediction_nanos = GetPredictedFrameTime().InMicroseconds() * 1000;

  TRACE_EVENT_BEGIN0("gpu", "GvrSchedulerDelegate::GetVRPosePtrWithNeckModel");
  device::mojom::VRPosePtr pose =
      device::GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_, head_mat_out,
                                                     prediction_nanos);
  TRACE_EVENT_END0("gpu", "GvrSchedulerDelegate::GetVRPosePtrWithNeckModel");

  return pose;
}
// Convenience wrapper around SendVSync: fetches a fresh head pose and its
// matching head transform, then forwards both to SendVSync.
void GvrSchedulerDelegate::SendVSyncWithNewHeadPose() {
  gfx::Transform head_mat;
  device::mojom::VRPosePtr pose = GetHeadPose(&head_mat);
  SendVSync(std::move(pose), head_mat);
}
void GvrSchedulerDelegate::SendVSync(device::mojom::VRPosePtr pose,
const gfx::Transform& head_mat) {
DCHECK(!get_frame_data_callback_.is_null());
DCHECK(webxr_vsync_pending_);
......@@ -879,15 +933,6 @@ void GvrSchedulerDelegate::SendVSync() {
frame_data->buffer_holder = buffer->mailbox_holder;
}
int64_t prediction_nanos = GetPredictedFrameTime().InMicroseconds() * 1000;
gfx::Transform head_mat;
TRACE_EVENT_BEGIN0("gpu", "GvrSchedulerDelegate::GetVRPosePtrWithNeckModel");
device::mojom::VRPosePtr pose =
device::GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_, &head_mat,
prediction_nanos);
TRACE_EVENT_END0("gpu", "GvrSchedulerDelegate::GetVRPosePtrWithNeckModel");
// Process all events. Check for ones we wish to react to.
gvr::Event last_event;
while (gvr_api_->PollEvent(&last_event)) {
......@@ -1036,7 +1081,7 @@ void GvrSchedulerDelegate::GetFrameData(
}
get_frame_data_callback_ = std::move(callback);
WebXrTryStartAnimatingFrame(false);
WebXrTryStartAnimatingFrame();
}
void GvrSchedulerDelegate::SubmitFrameMissing(
......@@ -1240,7 +1285,7 @@ void GvrSchedulerDelegate::ProcessWebVrFrameFromMailbox(
// Unblock the next animating frame in case it was waiting for this
// one to start processing.
WebXrTryStartAnimatingFrame(false);
WebXrTryStartAnimatingFrame();
}
void GvrSchedulerDelegate::OnWebXrTokenSignaled(
......@@ -1271,7 +1316,7 @@ void GvrSchedulerDelegate::ProcessWebVrFrameFromGMB(
// Unblock the next animating frame in case it was waiting for this
// one to start processing.
WebXrTryStartAnimatingFrame(false);
WebXrTryStartAnimatingFrame();
}
void GvrSchedulerDelegate::GetEnvironmentIntegrationProvider(
......
......@@ -98,7 +98,10 @@ class GvrSchedulerDelegate : public BaseSchedulerDelegate,
void WebXrCancelProcessingFrameAfterTransfer();
// Sends a GetFrameData response to the presentation client.
void SendVSync();
void SendVSyncWithNewHeadPose();
void SendVSync(device::mojom::VRPosePtr pose, const gfx::Transform& head_mat);
device::mojom::VRPosePtr GetHeadPose(gfx::Transform* head_mat_out);
void WebXrPrepareSharedBuffer();
void WebXrCreateOrResizeSharedBufferImage(WebXrSharedBuffer* buffer,
const gfx::Size& size);
......@@ -110,7 +113,7 @@ class GvrSchedulerDelegate : public BaseSchedulerDelegate,
bool WebVrCanAnimateFrame(bool is_from_onvsync);
// Call this after state changes that could result in WebVrCanAnimateFrame
// becoming true.
void WebXrTryStartAnimatingFrame(bool is_from_onvsync);
void WebXrTryStartAnimatingFrame();
bool ShouldDrawWebVr();
......
......@@ -284,14 +284,14 @@ void BrowserRenderer::UpdateUi(const RenderInfo& render_info,
}
void BrowserRenderer::ProcessControllerInputForWebXr(
const gfx::Transform& head_pose,
base::TimeTicks current_time) {
TRACE_EVENT0("gpu", "Vr.ProcessControllerInputForWebXr");
DCHECK(input_delegate_);
DCHECK(ui_);
base::TimeTicks timing_start = base::TimeTicks::Now();
// No transform required for input handling while in WebXR.
input_delegate_->UpdateController(gfx::Transform(), current_time, true);
input_delegate_->UpdateController(head_pose, current_time, true);
auto input_event_list = input_delegate_->GetGestures(current_time);
ui_->HandleMenuButtonEvents(&input_event_list);
......
......@@ -88,7 +88,8 @@ class VR_EXPORT BrowserRenderer : public SchedulerBrowserRendererInterface {
void DrawBrowserFrame(base::TimeTicks current_time) override;
void DrawWebXrFrame(base::TimeTicks current_time,
const gfx::Transform& head_pose) override;
void ProcessControllerInputForWebXr(base::TimeTicks current_time) override;
void ProcessControllerInputForWebXr(const gfx::Transform& head_pose,
base::TimeTicks current_time) override;
void Draw(FrameType frame_type,
base::TimeTicks current_time,
......
......@@ -380,7 +380,8 @@ TEST_F(BrowserRendererTest, ProcessControllerInputForWebXr) {
EXPECT_CALL(*ui_, HandleInput(_, _, _, _, _)).Times(0);
EXPECT_CALL(*ui_, HandleMenuButtonEvents(_)).Times(1).InSequence(s);
browser_renderer->ProcessControllerInputForWebXr(base::TimeTicks());
browser_renderer->ProcessControllerInputForWebXr(gfx::Transform(),
base::TimeTicks());
}
} // namespace vr
......@@ -22,7 +22,8 @@ class SchedulerBrowserRendererInterface {
// Pass the same head_pose used to render the submitted WebXR frame.
virtual void DrawWebXrFrame(base::TimeTicks current_time,
const gfx::Transform& head_pose) = 0;
virtual void ProcessControllerInputForWebXr(base::TimeTicks current_time) = 0;
virtual void ProcessControllerInputForWebXr(const gfx::Transform& head_pose,
base::TimeTicks current_time) = 0;
};
} // namespace vr
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment