Commit 5e0ff7a0 authored by Kevin Qin's avatar Kevin Qin Committed by Commit Bot

Write and readback pixels to a texture in OpenXR tests

The old implementation was sort of a hack to be able to figure out
what values to appropriately set based on the configured IPD that
the device set (since one test set specific colors and also had
a different IPD than the rest). The new implementation fixes this
by reading back from the texture. So the hack is no longer needed.

The affected tests now store the
pixels to a texture when the frame is submitted, and then read back
from the texture.
Tests affected as of 8/28/2019:
WebXrVrOpenXrBrowserTest.TestPresentationPixels
WebXrVrOpenXrBrowserTest.TestPresentationPoses
WebXrVrOpenXrBrowserTest.TestLocationIndicatorWhenUserAskedToPrompt
WebXrVrOpenXrBrowserTest.TestMultipleInitialIndicators_OneDeviceAllowed

Fixed: 986621
Change-Id: I18421f1adc136bb4e1799026af486cfe85fbff05
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1970257
Commit-Queue: Zheng Qin <zheqi@microsoft.com>
Reviewed-by: Alexander Cooper <alcooper@chromium.org>
Cr-Commit-Position: refs/heads/master@{#726515}
parent 72af3a05
...@@ -161,7 +161,6 @@ std::string GetPoseAsString(const Frame& frame) { ...@@ -161,7 +161,6 @@ std::string GetPoseAsString(const Frame& frame) {
} // namespace } // namespace
// TODO(crbug.com/986621) - OpenXR currently hard codes data
// Pixel test for WebXR - start presentation, submit frames, get data back out. // Pixel test for WebXR - start presentation, submit frames, get data back out.
// Validates that submitted frames used expected pose. // Validates that submitted frames used expected pose.
WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(TestPresentationPoses) { WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(TestPresentationPoses) {
......
...@@ -160,11 +160,7 @@ WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F( ...@@ -160,11 +160,7 @@ WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(
UserFriendlyElementName::kWebXrLocationPermissionIndicator, false}}); UserFriendlyElementName::kWebXrLocationPermissionIndicator, false}});
} }
// TODO(crbug.com/986621) - Enable for OpenXR WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(
IN_PROC_MULTI_CLASS_BROWSER_TEST_F2(
WebXrVrOpenVrBrowserTest,
WebXrVrWmrBrowserTest,
WebXrVrBrowserTestBase,
TestLocationIndicatorWhenUserAskedToPrompt) { TestLocationIndicatorWhenUserAskedToPrompt) {
TestForInitialIndicatorForContentType( TestForInitialIndicatorForContentType(
t, {{ContentSettingsType::GEOLOCATION, CONTENT_SETTING_ASK, t, {{ContentSettingsType::GEOLOCATION, CONTENT_SETTING_ASK,
...@@ -186,12 +182,7 @@ WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F( ...@@ -186,12 +182,7 @@ WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(
}); });
} }
// TODO(crbug.com/986621) - Enable for OpenXR WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(
IN_PROC_MULTI_CLASS_BROWSER_TEST_F2(
WebXrVrOpenVrBrowserTest,
WebXrVrWmrBrowserTest,
WebXrVrBrowserTestBase,
TestMultipleInitialIndicators_OneDeviceAllowed) { TestMultipleInitialIndicators_OneDeviceAllowed) {
TestForInitialIndicatorForContentType( TestForInitialIndicatorForContentType(
t, t,
......
...@@ -87,7 +87,6 @@ void TestPresentationPixelsImpl(WebXrVrBrowserTestBase* t, ...@@ -87,7 +87,6 @@ void TestPresentationPixelsImpl(WebXrVrBrowserTestBase* t,
<< "Alpha channel of submitted color does not match expectation"; << "Alpha channel of submitted color does not match expectation";
} }
// TODO(crbug.com/986621) - OpenXR currently hard codes data
WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(TestPresentationPixels) { WEBXR_VR_ALL_RUNTIMES_BROWSER_TEST_F(TestPresentationPixels) {
TestPresentationPixelsImpl(t, "test_webxr_pixels"); TestPresentationPixelsImpl(t, "test_webxr_pixels");
} }
......
...@@ -94,41 +94,81 @@ void OpenXrTestHelper::SetTestHook(device::VRTestHook* hook) { ...@@ -94,41 +94,81 @@ void OpenXrTestHelper::SetTestHook(device::VRTestHook* hook) {
} }
void OpenXrTestHelper::OnPresentedFrame() { void OpenXrTestHelper::OnPresentedFrame() {
static uint32_t frame_id = 1; DCHECK_NE(textures_arr_.size(), 0ull);
D3D11_TEXTURE2D_DESC desc;
device::SubmittedFrameData left_data = {};
textures_arr_[acquired_swapchain_texture_]->GetDesc(&desc);
left_data.image_width = desc.Width;
left_data.image_height = desc.Height;
device::SubmittedFrameData right_data = left_data;
left_data.left_eye = true;
right_data.left_eye = false;
CopyTextureDataIntoFrameData(&left_data, true);
CopyTextureDataIntoFrameData(&right_data, false);
base::AutoLock auto_lock(lock_); base::AutoLock auto_lock(lock_);
if (!test_hook_) if (!test_hook_)
return; return;
// TODO(https://crbug.com/986621): The frame color is currently hard-coded to test_hook_->OnFrameSubmitted(left_data);
// what the pixel tests expects. We should instead store the actual WebGL test_hook_->OnFrameSubmitted(right_data);
// texture and read from it, which will also verify the correct swapchain }
// texture was used.
device::DeviceConfig device_config = test_hook_->WaitGetDeviceConfig();
device::SubmittedFrameData frame_data = {};
if (std::abs(device_config.interpupillary_distance - 0.2f) <
std::numeric_limits<float>::epsilon()) {
// TestPresentationPoses sets the ipd to 0.2f, whereas tests by default have
// an ipd of 0.1f. This test has specific formulas to determine the colors,
// specified in test_webxr_poses.html.
frame_data.color = {
frame_id % 256, ((frame_id - frame_id % 256) / 256) % 256,
((frame_id - frame_id % (256 * 256)) / (256 * 256)) % 256, 255};
} else {
// The WebXR tests by default clears to blue. TestPresentationPixels
// verifies this color.
frame_data.color = {0, 0, 255, 255};
}
frame_data.left_eye = true; void OpenXrTestHelper::CopyTextureDataIntoFrameData(
test_hook_->OnFrameSubmitted(frame_data); device::SubmittedFrameData* data,
bool left) {
DCHECK(d3d_device_);
DCHECK_NE(textures_arr_.size(), 0ull);
Microsoft::WRL::ComPtr<ID3D11DeviceContext> context;
d3d_device_->GetImmediateContext(&context);
frame_data.left_eye = false; size_t buffer_size = sizeof(device::SubmittedFrameData::raw_buffer);
test_hook_->OnFrameSubmitted(frame_data); size_t buffer_size_pixels = buffer_size / sizeof(device::Color);
frame_id++; // We copy the submitted texture to a new texture, so we can map it, and
// read back pixel data.
auto desc = CD3D11_TEXTURE2D_DESC();
desc.ArraySize = 1;
desc.Width = buffer_size_pixels;
desc.Height = 1;
desc.MipLevels = 1;
desc.SampleDesc.Count = 1;
desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
desc.Usage = D3D11_USAGE_STAGING;
desc.BindFlags = 0;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
Microsoft::WRL::ComPtr<ID3D11Texture2D> texture_destination;
HRESULT hr =
d3d_device_->CreateTexture2D(&desc, nullptr, &texture_destination);
DCHECK_EQ(hr, S_OK);
// A strip of pixels along the top of the texture, however many will fit into
// our buffer.
D3D11_BOX box;
if (left) {
box = {0, 0, 0, buffer_size_pixels, 1, 1};
} else {
box = {kDimension, 0, 0, kDimension + buffer_size_pixels, 1, 1};
}
context->CopySubresourceRegion(
texture_destination.Get(), 0, 0, 0, 0,
textures_arr_[acquired_swapchain_texture_].Get(), 0, &box);
D3D11_MAPPED_SUBRESOURCE map_data = {};
hr = context->Map(texture_destination.Get(), 0, D3D11_MAP_READ, 0, &map_data);
DCHECK_EQ(hr, S_OK);
// We have a 1-pixel image, so store it in the provided SubmittedFrameData
// along with the raw data.
device::Color* color = static_cast<device::Color*>(map_data.pData);
data->color = color[0];
memcpy(&data->raw_buffer, map_data.pData, buffer_size);
context->Unmap(texture_destination.Get(), 0);
} }
XrSystemId OpenXrTestHelper::GetSystemId() { XrSystemId OpenXrTestHelper::GetSystemId() {
......
...@@ -165,6 +165,8 @@ class OpenXrTestHelper : public device::ServiceTestHook { ...@@ -165,6 +165,8 @@ class OpenXrTestHelper : public device::ServiceTestHook {
ActionProperties(const ActionProperties& other); ActionProperties(const ActionProperties& other);
}; };
void CopyTextureDataIntoFrameData(device::SubmittedFrameData* data,
bool left);
XrResult UpdateAction(XrAction action); XrResult UpdateAction(XrAction action);
void SetSessionState(XrSessionState state); void SetSessionState(XrSessionState state);
base::Optional<gfx::Transform> GetPose(); base::Optional<gfx::Transform> GetPose();
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment