Commit 1882f132 authored by Klaus Weidner's avatar Klaus Weidner Committed by Commit Bot

Track WebVR timings, add WaitPrevStrategy

In preparation for GpuFence synchronization, track GVR acquire/submit
times and report JavaScript waitForPreviousRender times back to
VrShellGl via Mojo.

Refactor waiting for previous transfer and render, using helper
functions to unclutter submitFrame.

Wait for previous transfer is only used for the Android surface path,
move it there.

Wait for previous render now supports different choices for the
execution point, including NEVER for the OpenVR path. The BEFORE_BITMAP
choice is not yet active, it's intended for use with GpuFence-separated
frames. It does work for the current render path too, but IIRC is a bit
slower there due to less parallelism.

BUG=761432

Change-Id: I9d24d6e0a133ad76cfda4f40c2bba84ffd7363ef
Reviewed-on: https://chromium-review.googlesource.com/802756
Reviewed-by: Daniel Cheng <dcheng@chromium.org>
Reviewed-by: Bill Orr <billorr@chromium.org>
Reviewed-by: Michael Thiessen <mthiesse@chromium.org>
Commit-Queue: Klaus Weidner <klausw@chromium.org>
Cr-Commit-Position: refs/heads/master@{#521735}
parent 2cc2073f
...@@ -4,6 +4,7 @@ ...@@ -4,6 +4,7 @@
#include "chrome/browser/android/vr_shell/vr_shell_gl.h" #include "chrome/browser/android/vr_shell/vr_shell_gl.h"
#include <algorithm>
#include <chrono> #include <chrono>
#include <limits> #include <limits>
#include <utility> #include <utility>
...@@ -25,11 +26,9 @@ ...@@ -25,11 +26,9 @@
#include "chrome/browser/android/vr_shell/vr_usage_monitor.h" #include "chrome/browser/android/vr_shell/vr_usage_monitor.h"
#include "chrome/browser/vr/assets.h" #include "chrome/browser/vr/assets.h"
#include "chrome/browser/vr/elements/ui_element.h" #include "chrome/browser/vr/elements/ui_element.h"
#include "chrome/browser/vr/fps_meter.h"
#include "chrome/browser/vr/model/camera_model.h" #include "chrome/browser/vr/model/camera_model.h"
#include "chrome/browser/vr/model/model.h" #include "chrome/browser/vr/model/model.h"
#include "chrome/browser/vr/pose_util.h" #include "chrome/browser/vr/pose_util.h"
#include "chrome/browser/vr/sliding_average.h"
#include "chrome/browser/vr/ui.h" #include "chrome/browser/vr/ui.h"
#include "chrome/browser/vr/ui_element_renderer.h" #include "chrome/browser/vr/ui_element_renderer.h"
#include "chrome/browser/vr/ui_scene.h" #include "chrome/browser/vr/ui_scene.h"
...@@ -173,10 +172,12 @@ VrShellGl::VrShellGl(GlBrowserInterface* browser_interface, ...@@ -173,10 +172,12 @@ VrShellGl::VrShellGl(GlBrowserInterface* browser_interface,
binding_(this), binding_(this),
browser_(browser_interface), browser_(browser_interface),
keyboard_delegate_(keyboard_delegate), keyboard_delegate_(keyboard_delegate),
fps_meter_(new vr::FPSMeter()), fps_meter_(),
webvr_js_time_(new vr::SlidingTimeDeltaAverage(kWebVRSlidingAverageSize)), webvr_js_time_(kWebVRSlidingAverageSize),
webvr_render_time_( webvr_render_time_(kWebVRSlidingAverageSize),
new vr::SlidingTimeDeltaAverage(kWebVRSlidingAverageSize)), webvr_js_wait_time_(kWebVRSlidingAverageSize),
webvr_acquire_time_(kWebVRSlidingAverageSize),
webvr_submit_time_(kWebVRSlidingAverageSize),
weak_ptr_factory_(this) { weak_ptr_factory_(this) {
GvrInit(gvr_api); GvrInit(gvr_api);
} }
...@@ -311,7 +312,8 @@ void VrShellGl::CreateOrResizeWebVRSurface(const gfx::Size& size) { ...@@ -311,7 +312,8 @@ void VrShellGl::CreateOrResizeWebVRSurface(const gfx::Size& size) {
} }
void VrShellGl::SubmitFrame(int16_t frame_index, void VrShellGl::SubmitFrame(int16_t frame_index,
const gpu::MailboxHolder& mailbox) { const gpu::MailboxHolder& mailbox,
base::TimeDelta time_waited) {
TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame"); TRACE_EVENT0("gpu", "VrShellGl::SubmitWebVRFrame");
// submit_client_ could be null when we exit presentation, if there were // submit_client_ could be null when we exit presentation, if there were
...@@ -331,6 +333,16 @@ void VrShellGl::SubmitFrame(int16_t frame_index, ...@@ -331,6 +333,16 @@ void VrShellGl::SubmitFrame(int16_t frame_index,
webvr_time_js_submit_[frame_index % kPoseRingBufferSize] = webvr_time_js_submit_[frame_index % kPoseRingBufferSize] =
base::TimeTicks::Now(); base::TimeTicks::Now();
// The JavaScript wait time is supplied externally and not trustworthy. Clamp
// to a reasonable range to avoid math errors.
if (time_waited < base::TimeDelta())
time_waited = base::TimeDelta();
if (time_waited > base::TimeDelta::FromSeconds(1))
time_waited = base::TimeDelta::FromSeconds(1);
webvr_js_wait_time_.AddSample(time_waited);
TRACE_COUNTER1("gpu", "WebVR JS wait (ms)",
webvr_js_wait_time_.GetAverage().InMilliseconds());
// Swapping twice on a Surface without calling updateTexImage in // Swapping twice on a Surface without calling updateTexImage in
// between can lose frames, so don't draw+swap if we already have // between can lose frames, so don't draw+swap if we already have
// a pending frame we haven't consumed yet. // a pending frame we haven't consumed yet.
...@@ -847,7 +859,9 @@ void VrShellGl::DrawFrame(int16_t frame_index, base::TimeTicks current_time) { ...@@ -847,7 +859,9 @@ void VrShellGl::DrawFrame(int16_t frame_index, base::TimeTicks current_time) {
return; return;
TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame"); TRACE_EVENT_BEGIN0("gpu", "VrShellGl::AcquireFrame");
base::TimeTicks acquire_start = base::TimeTicks::Now();
acquired_frame_ = swap_chain_->AcquireFrame(); acquired_frame_ = swap_chain_->AcquireFrame();
webvr_acquire_time_.AddSample(base::TimeTicks::Now() - acquire_start);
TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame"); TRACE_EVENT_END0("gpu", "VrShellGl::AcquireFrame");
if (!acquired_frame_) if (!acquired_frame_)
return; return;
...@@ -995,8 +1009,13 @@ void VrShellGl::DrawFrameSubmitNow(int16_t frame_index, ...@@ -995,8 +1009,13 @@ void VrShellGl::DrawFrameSubmitNow(int16_t frame_index,
gvr::Mat4f mat; gvr::Mat4f mat;
TransformToGvrMat(head_pose, &mat); TransformToGvrMat(head_pose, &mat);
{
TRACE_EVENT0("gpu", "VrShellGl::SubmitToGvr");
base::TimeTicks submit_start = base::TimeTicks::Now();
acquired_frame_.Submit(*buffer_viewport_list_, mat); acquired_frame_.Submit(*buffer_viewport_list_, mat);
webvr_submit_time_.AddSample(base::TimeTicks::Now() - submit_start);
CHECK(!acquired_frame_); CHECK(!acquired_frame_);
}
// No need to swap buffers for surfaceless rendering. // No need to swap buffers for surfaceless rendering.
if (!surfaceless_rendering_) { if (!surfaceless_rendering_) {
...@@ -1019,15 +1038,15 @@ void VrShellGl::DrawFrameSubmitNow(int16_t frame_index, ...@@ -1019,15 +1038,15 @@ void VrShellGl::DrawFrameSubmitNow(int16_t frame_index,
webvr_time_pose_[frame_index % kPoseRingBufferSize]; webvr_time_pose_[frame_index % kPoseRingBufferSize];
base::TimeTicks js_submit_time = base::TimeTicks js_submit_time =
webvr_time_js_submit_[frame_index % kPoseRingBufferSize]; webvr_time_js_submit_[frame_index % kPoseRingBufferSize];
webvr_js_time_->AddSample(js_submit_time - pose_time); webvr_js_time_.AddSample(js_submit_time - pose_time);
webvr_render_time_->AddSample(now - js_submit_time); webvr_render_time_.AddSample(now - js_submit_time);
} }
// After saving the timestamp, fps will be available via GetFPS(). // After saving the timestamp, fps will be available via GetFPS().
// TODO(vollick): enable rendering of this framerate in a HUD. // TODO(vollick): enable rendering of this framerate in a HUD.
fps_meter_->AddFrame(base::TimeTicks::Now()); fps_meter_.AddFrame(base::TimeTicks::Now());
DVLOG(1) << "fps: " << fps_meter_->GetFPS(); DVLOG(1) << "fps: " << fps_meter_.GetFPS();
TRACE_COUNTER1("gpu", "WebVR FPS", fps_meter_->GetFPS()); TRACE_COUNTER1("gpu", "WebVR FPS", fps_meter_.GetFPS());
} }
bool VrShellGl::ShouldDrawWebVr() { bool VrShellGl::ShouldDrawWebVr() {
...@@ -1224,14 +1243,17 @@ base::TimeDelta VrShellGl::GetPredictedFrameTime() { ...@@ -1224,14 +1243,17 @@ base::TimeDelta VrShellGl::GetPredictedFrameTime() {
// If we aim to submit at vsync, that frame will start scanning out // If we aim to submit at vsync, that frame will start scanning out
// one vsync later. Add a half frame to split the difference between // one vsync later. Add a half frame to split the difference between
// left and right eye. // left and right eye.
base::TimeDelta js_time = webvr_js_time_->GetAverageOrDefault(frame_interval); base::TimeDelta js_time = webvr_js_time_.GetAverageOrDefault(frame_interval);
base::TimeDelta render_time = base::TimeDelta render_time =
webvr_render_time_->GetAverageOrDefault(frame_interval); webvr_render_time_.GetAverageOrDefault(frame_interval);
base::TimeDelta overhead_time = frame_interval * 3 / 2; base::TimeDelta overhead_time = frame_interval * 3 / 2;
base::TimeDelta expected_frame_time = js_time + render_time + overhead_time; base::TimeDelta expected_frame_time = js_time + render_time + overhead_time;
TRACE_COUNTER2("gpu", "WebVR frame time (ms)", "javascript", TRACE_COUNTER2("gpu", "WebVR frame time (ms)", "javascript",
js_time.InMilliseconds(), "rendering", js_time.InMilliseconds(), "rendering",
render_time.InMilliseconds()); render_time.InMilliseconds());
TRACE_COUNTER2("gpu", "GVR frame time (ms)", "acquire",
webvr_acquire_time_.GetAverage().InMilliseconds(), "submit",
webvr_submit_time_.GetAverage().InMilliseconds());
TRACE_COUNTER1("gpu", "WebVR pose prediction (ms)", TRACE_COUNTER1("gpu", "WebVR pose prediction (ms)",
expected_frame_time.InMilliseconds()); expected_frame_time.InMilliseconds());
return expected_frame_time; return expected_frame_time;
......
...@@ -20,7 +20,9 @@ ...@@ -20,7 +20,9 @@
#include "chrome/browser/android/vr_shell/vr_controller.h" #include "chrome/browser/android/vr_shell/vr_controller.h"
#include "chrome/browser/vr/content_input_delegate.h" #include "chrome/browser/vr/content_input_delegate.h"
#include "chrome/browser/vr/controller_mesh.h" #include "chrome/browser/vr/controller_mesh.h"
#include "chrome/browser/vr/fps_meter.h"
#include "chrome/browser/vr/model/controller_model.h" #include "chrome/browser/vr/model/controller_model.h"
#include "chrome/browser/vr/sliding_average.h"
#include "chrome/browser/vr/ui_input_manager.h" #include "chrome/browser/vr/ui_input_manager.h"
#include "chrome/browser/vr/ui_renderer.h" #include "chrome/browser/vr/ui_renderer.h"
#include "device/vr/vr_service.mojom.h" #include "device/vr/vr_service.mojom.h"
...@@ -161,7 +163,8 @@ class VrShellGl : public device::mojom::VRPresentationProvider { ...@@ -161,7 +163,8 @@ class VrShellGl : public device::mojom::VRPresentationProvider {
// VRPresentationProvider // VRPresentationProvider
void GetVSync(GetVSyncCallback callback) override; void GetVSync(GetVSyncCallback callback) override;
void SubmitFrame(int16_t frame_index, void SubmitFrame(int16_t frame_index,
const gpu::MailboxHolder& mailbox) override; const gpu::MailboxHolder& mailbox,
base::TimeDelta time_waited) override;
void SubmitFrameWithTextureHandle(int16_t frame_index, void SubmitFrameWithTextureHandle(int16_t frame_index,
mojo::ScopedHandle texture_handle) override; mojo::ScopedHandle texture_handle) override;
void UpdateLayerBounds(int16_t frame_index, void UpdateLayerBounds(int16_t frame_index,
...@@ -252,10 +255,24 @@ class VrShellGl : public device::mojom::VRPresentationProvider { ...@@ -252,10 +255,24 @@ class VrShellGl : public device::mojom::VRPresentationProvider {
// Attributes for gesture detection while holding app button. // Attributes for gesture detection while holding app button.
gfx::Vector3dF controller_start_direction_; gfx::Vector3dF controller_start_direction_;
std::unique_ptr<vr::FPSMeter> fps_meter_; vr::FPSMeter fps_meter_;
std::unique_ptr<vr::SlidingTimeDeltaAverage> webvr_js_time_; // JS time is from SendVSync (pose time) to incoming JS submitFrame.
std::unique_ptr<vr::SlidingTimeDeltaAverage> webvr_render_time_; vr::SlidingTimeDeltaAverage webvr_js_time_;
// Render time is from JS submitFrame to estimated render completion.
// This is an estimate when submitting incomplete frames to GVR.
// If submitFrame blocks, that means the previous frame wasn't done
// rendering yet.
vr::SlidingTimeDeltaAverage webvr_render_time_;
// JS wait time is spent waiting for the previous frame to complete
// rendering, as reported from the Renderer via mojo.
vr::SlidingTimeDeltaAverage webvr_js_wait_time_;
// GVR acquire/submit times for scheduling heuristics.
vr::SlidingTimeDeltaAverage webvr_acquire_time_;
vr::SlidingTimeDeltaAverage webvr_submit_time_;
gfx::Point3F pointer_start_; gfx::Point3F pointer_start_;
......
...@@ -25,7 +25,8 @@ OpenVRRenderLoop::~OpenVRRenderLoop() { ...@@ -25,7 +25,8 @@ OpenVRRenderLoop::~OpenVRRenderLoop() {
} }
void OpenVRRenderLoop::SubmitFrame(int16_t frame_index, void OpenVRRenderLoop::SubmitFrame(int16_t frame_index,
const gpu::MailboxHolder& mailbox) { const gpu::MailboxHolder& mailbox,
base::TimeDelta time_waited) {
NOTREACHED(); NOTREACHED();
} }
......
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
#include "base/memory/scoped_refptr.h" #include "base/memory/scoped_refptr.h"
#include "base/threading/thread.h" #include "base/threading/thread.h"
#include "base/time/time.h"
#include "build/build_config.h" #include "build/build_config.h"
#include "device/vr/vr_service.mojom.h" #include "device/vr/vr_service.mojom.h"
#include "mojo/public/cpp/bindings/binding.h" #include "mojo/public/cpp/bindings/binding.h"
...@@ -33,7 +34,8 @@ class OpenVRRenderLoop : public base::Thread, mojom::VRPresentationProvider { ...@@ -33,7 +34,8 @@ class OpenVRRenderLoop : public base::Thread, mojom::VRPresentationProvider {
// VRPresentationProvider overrides: // VRPresentationProvider overrides:
void SubmitFrame(int16_t frame_index, void SubmitFrame(int16_t frame_index,
const gpu::MailboxHolder& mailbox) override; const gpu::MailboxHolder& mailbox,
base::TimeDelta time_waited) override;
void SubmitFrameWithTextureHandle(int16_t frame_index, void SubmitFrameWithTextureHandle(int16_t frame_index,
mojo::ScopedHandle texture_handle) override; mojo::ScopedHandle texture_handle) override;
void UpdateLayerBounds(int16_t frame_id, void UpdateLayerBounds(int16_t frame_id,
......
...@@ -131,7 +131,8 @@ interface VRPresentationProvider { ...@@ -131,7 +131,8 @@ interface VRPresentationProvider {
// no mapping. // no mapping.
GetVSync() => (VRPose? pose, mojo.common.mojom.TimeDelta time, int16 frame_id, GetVSync() => (VRPose? pose, mojo.common.mojom.TimeDelta time, int16 frame_id,
VSyncStatus status); VSyncStatus status);
SubmitFrame(int16 frame_id, gpu.mojom.MailboxHolder mailbox_holder); SubmitFrame(int16 frame_id, gpu.mojom.MailboxHolder mailbox_holder,
mojo.common.mojom.TimeDelta time_waited);
// TODO(https://crbug.com/676224): Support preprocessing of mojom files, since // TODO(https://crbug.com/676224): Support preprocessing of mojom files, since
// this is Windows only. // this is Windows only.
......
...@@ -65,7 +65,7 @@ class MockVRPresentationProvider { ...@@ -65,7 +65,7 @@ class MockVRPresentationProvider {
this.binding_.bind(request); this.binding_.bind(request);
} }
submitFrame(frameId, mailboxHolder) { submitFrame(frameId, mailboxHolder, timeWaited) {
// Trigger the submit completion callbacks here. WARNING: The // Trigger the submit completion callbacks here. WARNING: The
// Javascript-based mojo mocks are *not* re-entrant. In the current // Javascript-based mojo mocks are *not* re-entrant. In the current
// default implementation, Javascript calls display.submitFrame, and the // default implementation, Javascript calls display.submitFrame, and the
......
...@@ -506,33 +506,53 @@ ScriptPromise VRDisplay::exitPresent(ScriptState* script_state) { ...@@ -506,33 +506,53 @@ ScriptPromise VRDisplay::exitPresent(ScriptState* script_state) {
return promise; return promise;
} }
void VRDisplay::BeginPresent() { bool VRDisplay::ConfigurePresentationPathForDisplay() {
Document* doc = this->GetDocument(); // TODO(klausw): capabilities_ should provide such information more directly.
// Currently, there's only two presentation paths which happen to align with
// having an external display (desktop devices such as OpenVR) or not (mobile
// VR on Android).
if (capabilities_->hasExternalDisplay()) { if (capabilities_->hasExternalDisplay()) {
// Presenting with external displays has to make a copy of the image frame_transport_method_ = FrameTransport::kTextureHandle;
// since the canvas may still be visible at the same time. wait_for_previous_render_ = WaitPrevStrategy::kNoWait;
present_image_needs_copy_ = true;
} else { } else {
if (layer_.source().IsHTMLCanvasElement()) { frame_transport_method_ = FrameTransport::kMailbox;
// TODO(klausw,crbug.com/698923): suppress compositor updates wait_for_previous_render_ = WaitPrevStrategy::kAfterBitmap;
// since they aren't needed, they do a fair amount of extra }
// work. return true;
} else { }
DCHECK(layer_.source().IsOffscreenCanvas());
void VRDisplay::BeginPresent() {
Document* doc = this->GetDocument();
DOMException* exception = nullptr;
if (!ConfigurePresentationPathForDisplay()) {
exception = DOMException::Create(
kInvalidStateError, "VRDisplay presentation path not implemented.");
}
if (layer_.source().IsOffscreenCanvas()) {
// TODO(junov, crbug.com/695497): Implement OffscreenCanvas presentation // TODO(junov, crbug.com/695497): Implement OffscreenCanvas presentation
ForceExitPresent(); exception = DOMException::Create(
DOMException* exception = DOMException::Create(
kInvalidStateError, "OffscreenCanvas presentation not implemented."); kInvalidStateError, "OffscreenCanvas presentation not implemented.");
} else {
// A canvas must be either Offscreen or plain HTMLCanvas.
DCHECK(layer_.source().IsHTMLCanvasElement());
}
if (exception) {
ForceExitPresent();
while (!pending_present_resolvers_.IsEmpty()) { while (!pending_present_resolvers_.IsEmpty()) {
ScriptPromiseResolver* resolver = ScriptPromiseResolver* resolver = pending_present_resolvers_.TakeFirst();
pending_present_resolvers_.TakeFirst();
resolver->Reject(exception); resolver->Reject(exception);
} }
ReportPresentationResult( ReportPresentationResult(
PresentationResult::kPresentationNotSupportedByDisplay); PresentationResult::kPresentationNotSupportedByDisplay);
return; return;
} }
}
// Presenting with external displays has to make a copy of the image
// since the canvas may still be visible at the same time.
present_image_needs_copy_ = capabilities_->hasExternalDisplay();
if (doc) { if (doc) {
Platform::Current()->RecordRapporURL("VR.WebVR.PresentSuccess", Platform::Current()->RecordRapporURL("VR.WebVR.PresentSuccess",
...@@ -660,26 +680,17 @@ void VRDisplay::submitFrame() { ...@@ -660,26 +680,17 @@ void VRDisplay::submitFrame() {
UpdateLayerBounds(); UpdateLayerBounds();
} }
// There's two types of synchronization needed for submitting frames: // Ensure that required device selections were made.
// DCHECK(frame_transport_method_ != FrameTransport::kUninitialized);
// - Before submitting, need to wait for the previous frame to be DCHECK(wait_for_previous_render_ != WaitPrevStrategy::kUninitialized);
// pulled off the transfer surface to avoid lost frames. This
// is currently a compile-time option, normally we always want WTF::TimeDelta wait_time;
// to defer this wait to increase parallelism. // Conditionally wait for the previous render to finish, to avoid losing
// // frames in the Android Surface / GLConsumer pair. An early wait here is
// - After submitting, need to wait for the mailbox to be consumed, // appropriate when using a GpuFence to separate drawing, the new frame isn't
// and the image object must remain alive during this time. // complete yet at this stage.
// We keep a reference to the image so that we can defer this if (wait_for_previous_render_ == WaitPrevStrategy::kBeforeBitmap)
// wait. Here, we wait for the previous transfer to complete. wait_time += WaitForPreviousRenderToFinish();
{
TRACE_EVENT0("gpu", "VRDisplay::waitForPreviousTransferToFinish");
while (pending_submit_frame_) {
if (!submit_frame_client_binding_.WaitForIncomingMethodCall()) {
DLOG(ERROR) << "Failed to receive SubmitFrame response";
break;
}
}
}
TRACE_EVENT_BEGIN0("gpu", "VRDisplay::GetStaticBitmapImage"); TRACE_EVENT_BEGIN0("gpu", "VRDisplay::GetStaticBitmapImage");
scoped_refptr<Image> image_ref = rendering_context_->GetStaticBitmapImage(); scoped_refptr<Image> image_ref = rendering_context_->GetStaticBitmapImage();
...@@ -703,8 +714,10 @@ void VRDisplay::submitFrame() { ...@@ -703,8 +714,10 @@ void VRDisplay::submitFrame() {
} }
} }
if (present_image_needs_copy_) { if (frame_transport_method_ == FrameTransport::kTextureHandle) {
#if defined(OS_WIN) #if defined(OS_WIN)
// Currently, we assume that this transport needs a copy.
DCHECK(present_image_needs_copy_);
TRACE_EVENT0("gpu", "VRDisplay::CopyImage"); TRACE_EVENT0("gpu", "VRDisplay::CopyImage");
if (!frame_copier_ || !last_transfer_succeeded_) { if (!frame_copier_ || !last_transfer_succeeded_) {
frame_copier_ = std::make_unique<GpuMemoryBufferImageCopy>(context_gl_); frame_copier_ = std::make_unique<GpuMemoryBufferImageCopy>(context_gl_);
...@@ -725,7 +738,11 @@ void VRDisplay::submitFrame() { ...@@ -725,7 +738,11 @@ void VRDisplay::submitFrame() {
#else #else
NOTIMPLEMENTED(); NOTIMPLEMENTED();
#endif #endif
} else { } else if (frame_transport_method_ == FrameTransport::kMailbox) {
// Currently, this transport assumes we don't need to make a separate copy
// of the canvas content.
DCHECK(!present_image_needs_copy_);
// The AcceleratedStaticBitmapImage must be kept alive until the // The AcceleratedStaticBitmapImage must be kept alive until the
// mailbox is used via createAndConsumeTextureCHROMIUM, the mailbox // mailbox is used via createAndConsumeTextureCHROMIUM, the mailbox
// itself does not keep it alive. We must keep a reference to the // itself does not keep it alive. We must keep a reference to the
...@@ -736,27 +753,20 @@ void VRDisplay::submitFrame() { ...@@ -736,27 +753,20 @@ void VRDisplay::submitFrame() {
static_image->EnsureMailbox(kVerifiedSyncToken); static_image->EnsureMailbox(kVerifiedSyncToken);
TRACE_EVENT_END0("gpu", "VRDisplay::EnsureMailbox"); TRACE_EVENT_END0("gpu", "VRDisplay::EnsureMailbox");
// Conditionally wait for the previous render to finish. A late wait here
// attempts to overlap work in parallel with the previous frame's
// rendering. This is used if submitting fully rendered frames to GVR, but
// is susceptible to bad GPU scheduling if the new frame competes with the
// previous frame's incomplete rendering.
if (wait_for_previous_render_ == WaitPrevStrategy::kAfterBitmap)
wait_time += WaitForPreviousRenderToFinish();
// Save a reference to the image to keep it alive until next frame, // Save a reference to the image to keep it alive until next frame,
// where we'll wait for the transfer to finish before overwriting // but first wait for the transfer to finish before overwriting it.
// it. // Usually this check is satisfied without waiting.
WaitForPreviousTransfer();
previous_image_ = std::move(image_ref); previous_image_ = std::move(image_ref);
// Wait for the previous render to finish, to avoid losing frames in the
// Android Surface / GLConsumer pair. TODO(klausw): make this tunable?
// Other devices may have different preferences. Do this step as late
// as possible before SubmitFrame to ensure we can do as much work as
// possible in parallel with the previous frame's rendering.
{
TRACE_EVENT0("gpu", "waitForPreviousRenderToFinish");
while (pending_previous_frame_render_) {
if (!submit_frame_client_binding_.WaitForIncomingMethodCall()) {
DLOG(ERROR) << "Failed to receive SubmitFrame response";
break;
}
}
}
pending_previous_frame_render_ = true;
pending_submit_frame_ = true; pending_submit_frame_ = true;
// Create mailbox and sync token for transfer. // Create mailbox and sync token for transfer.
...@@ -767,10 +777,14 @@ void VRDisplay::submitFrame() { ...@@ -767,10 +777,14 @@ void VRDisplay::submitFrame() {
TRACE_EVENT_BEGIN0("gpu", "VRDisplay::SubmitFrame"); TRACE_EVENT_BEGIN0("gpu", "VRDisplay::SubmitFrame");
vr_presentation_provider_->SubmitFrame( vr_presentation_provider_->SubmitFrame(
vr_frame_id_, gpu::MailboxHolder(mailbox, sync_token, GL_TEXTURE_2D)); vr_frame_id_, gpu::MailboxHolder(mailbox, sync_token, GL_TEXTURE_2D),
wait_time);
TRACE_EVENT_END0("gpu", "VRDisplay::SubmitFrame"); TRACE_EVENT_END0("gpu", "VRDisplay::SubmitFrame");
} else {
NOTREACHED() << "Unimplemented frame_transport_method_";
} }
pending_previous_frame_render_ = true;
did_submit_this_frame_ = true; did_submit_this_frame_ = true;
// Reset our frame id, since anything we'd want to do (resizing/etc) can // Reset our frame id, since anything we'd want to do (resizing/etc) can
// no-longer happen to this frame. // no-longer happen to this frame.
...@@ -795,6 +809,28 @@ void VRDisplay::OnSubmitFrameRendered() { ...@@ -795,6 +809,28 @@ void VRDisplay::OnSubmitFrameRendered() {
pending_previous_frame_render_ = false; pending_previous_frame_render_ = false;
} }
void VRDisplay::WaitForPreviousTransfer() {
TRACE_EVENT0("gpu", "VRDisplay::waitForPreviousTransferToFinish");
while (pending_submit_frame_) {
if (!submit_frame_client_binding_.WaitForIncomingMethodCall()) {
DLOG(ERROR) << "Failed to receive SubmitFrame response";
break;
}
}
}
WTF::TimeDelta VRDisplay::WaitForPreviousRenderToFinish() {
TRACE_EVENT0("gpu", "waitForPreviousRenderToFinish");
WTF::TimeTicks start = WTF::TimeTicks::Now();
while (pending_previous_frame_render_) {
if (!submit_frame_client_binding_.WaitForIncomingMethodCall()) {
DLOG(ERROR) << "Failed to receive SubmitFrame response";
break;
}
}
return WTF::TimeTicks::Now() - start;
}
Document* VRDisplay::GetDocument() { Document* VRDisplay::GetDocument() {
return navigator_vr_->GetDocument(); return navigator_vr_->GetDocument();
} }
...@@ -856,6 +892,8 @@ void VRDisplay::StopPresenting() { ...@@ -856,6 +892,8 @@ void VRDisplay::StopPresenting() {
pending_submit_frame_ = false; pending_submit_frame_ = false;
pending_previous_frame_render_ = false; pending_previous_frame_render_ = false;
did_submit_this_frame_ = false; did_submit_this_frame_ = false;
frame_transport_method_ = FrameTransport::kUninitialized;
wait_for_previous_render_ = WaitPrevStrategy::kUninitialized;
} }
void VRDisplay::OnActivate(device::mojom::blink::VRDisplayEventReason reason, void VRDisplay::OnActivate(device::mojom::blink::VRDisplayEventReason reason,
......
...@@ -123,6 +123,40 @@ class VRDisplay final : public EventTargetWithInlineData, ...@@ -123,6 +123,40 @@ class VRDisplay final : public EventTargetWithInlineData,
VRController* Controller(); VRController* Controller();
private: private:
// Specifies how submitFrame should transport frame data for the presenting
// VR device, set by ConfigurePresentationPathForDisplay().
enum class FrameTransport {
// Invalid default value. Must be changed to a valid choice before starting
// presentation.
kUninitialized,
// Command buffer CHROMIUM_texture_mailbox. Used by the Android Surface
// rendering path.
kMailbox,
// A TextureHandle as extracted from a GpuMemoryBufferHandle. Used with
// DXGI texture handles for OpenVR on Windows.
kTextureHandle,
};
// Some implementations need to synchronize submitting with the completion of
// the previous frame, i.e. the Android surface path needs to wait to avoid
// lost frames in the transfer surface and to avoid overstuffed buffers. The
// strategy choice here indicates at which point in the submission process
// it should wait. NO_WAIT means to skip this wait entirely. For example,
// the OpenVR render pipeline doesn't overlap frames, so the previous
// frame is already guaranteed complete.
enum class WaitPrevStrategy {
kUninitialized,
kNoWait,
kBeforeBitmap,
kAfterBitmap,
};
bool ConfigurePresentationPathForDisplay();
void WaitForPreviousTransfer();
WTF::TimeDelta WaitForPreviousRenderToFinish();
void OnPresentComplete(bool); void OnPresentComplete(bool);
void OnConnected(); void OnConnected();
...@@ -204,6 +238,9 @@ class VRDisplay final : public EventTargetWithInlineData, ...@@ -204,6 +238,9 @@ class VRDisplay final : public EventTargetWithInlineData,
// waitForPreviousTransferToFinish. // waitForPreviousTransferToFinish.
scoped_refptr<Image> previous_image_; scoped_refptr<Image> previous_image_;
FrameTransport frame_transport_method_ = FrameTransport::kUninitialized;
WaitPrevStrategy wait_for_previous_render_ = WaitPrevStrategy::kUninitialized;
TraceWrapperMember<ScriptedAnimationController> TraceWrapperMember<ScriptedAnimationController>
scripted_animation_controller_; scripted_animation_controller_;
bool pending_vrdisplay_raf_ = false; bool pending_vrdisplay_raf_ = false;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment