Commit 105f5197 authored by Peng Huang, committed by Commit Bot

Reland "VulkanImage: support create from GpuMemoryBufferHandle on Android"

This is a reland of 838eb034

Original change's description:
> VulkanImage: support create from GpuMemoryBufferHandle on Android
>
> VulkanImage::CreateFromGpuMemoryBufferHandle() is added to replace
> VulkanImplementation::CreateVkImageAndImportAHB(), and will replace
> VulkanImplementation::CreateImageFromGpuMemoryHandle() as well.
>
> Bug: None
> Change-Id: I7c6e3cea685f1ec305790f4a6d7bb66164f26e55
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2114587
> Commit-Queue: Peng Huang <penghuang@chromium.org>
> Reviewed-by: Robert Kroeger <rjkroege@chromium.org>
> Reviewed-by: Vasiliy Telezhnikov <vasilyt@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#753394}

TBR=rjkroege@chromium.org,vasilyt@chromium.org

Cq-Include-Trybots: luci.chromium.try:android_archive_rel_ng
Bug: None
Change-Id: Ie6132474ef4a386ed661116aa8f580fd8460901d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2121442
Reviewed-by: Peng Huang <penghuang@chromium.org>
Commit-Queue: Peng Huang <penghuang@chromium.org>
Cr-Commit-Position: refs/heads/master@{#753501}
parent 49bf8898
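
For orientation, here is a minimal sketch (not part of the commit) of the call pattern the diff below migrates to. It mirrors the WebView and shared-image call sites in this change; |context_provider|, |ahb|, |width|, and |height| are placeholders standing in for whatever a real call site already has:

    // Wrap the AHardwareBuffer in a GpuMemoryBufferHandle, as the call sites
    // in this diff do.
    auto handle = base::android::ScopedHardwareBufferHandle::Create(ahb);
    gfx::GpuMemoryBufferHandle gmb_handle(std::move(handle));

    // One call now replaces VulkanImplementation::CreateVkImageAndImportAHB();
    // ownership of the VkImage and its VkDeviceMemory lives inside
    // gpu::VulkanImage.
    auto vulkan_image = gpu::VulkanImage::CreateFromGpuMemoryBufferHandle(
        context_provider->GetDeviceQueue(), std::move(gmb_handle),
        gfx::Size(width, height), VK_FORMAT_R8G8B8A8_UNORM, 0 /* usage */);
    if (!vulkan_image) {
      LOG(ERROR) << "Could not create VkImage from AHB.";
      return;
    }

    // Cleanup is deferred until submitted work completes, instead of calling
    // vkDestroyImage()/vkFreeMemory() directly.
    context_provider->GetDeviceQueue()->GetFenceHelper()
        ->EnqueueVulkanObjectCleanupForSubmittedWork(std::move(vulkan_image));
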
......@@ -19,6 +19,7 @@
#include "gpu/ipc/common/android/android_image_reader_utils.h"
#include "gpu/vulkan/vulkan_fence_helper.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "gpu/vulkan/vulkan_image.h"
#include "gpu/vulkan/vulkan_implementation.h"
#include "third_party/skia/include/gpu/GrBackendSemaphore.h"
#include "third_party/skia/include/gpu/GrContext.h"
......@@ -26,6 +27,7 @@
#include "third_party/skia/include/gpu/vk/GrVkExtensions.h"
#include "third_party/skia/src/gpu/vk/GrVkSecondaryCBDrawContext.h"
#include "ui/gfx/color_space.h"
#include "ui/gfx/gpu_memory_buffer.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_context_egl.h"
#include "ui/gl/gl_image_ahardwarebuffer.h"
......@@ -462,32 +464,29 @@ void AwDrawFnImpl::DrawVkInterop(AwDrawFn_DrawVkParams* params) {
}
// Create a VkImage and import AHB.
if (!pending_draw->image_info.fImage) {
VkImage vk_image;
VkImageCreateInfo vk_image_info;
VkDeviceMemory vk_device_memory;
VkDeviceSize mem_allocation_size;
if (!vulkan_context_provider_->implementation()->CreateVkImageAndImportAHB(
vulkan_context_provider_->device(),
vulkan_context_provider_->physical_device(),
gfx::Size(params->width, params->height),
base::android::ScopedHardwareBufferHandle::Create(
pending_draw->ahb_image->GetAHardwareBuffer()->buffer()),
&vk_image, &vk_image_info, &vk_device_memory,
&mem_allocation_size)) {
if (!pending_draw->vulkan_image) {
auto handle = base::android::ScopedHardwareBufferHandle::Create(
pending_draw->ahb_image->GetAHardwareBuffer()->buffer());
gfx::GpuMemoryBufferHandle gmb_handle(std::move(handle));
auto* device_queue = vulkan_context_provider_->GetDeviceQueue();
auto vulkan_image = gpu::VulkanImage::CreateFromGpuMemoryBufferHandle(
device_queue, std::move(gmb_handle),
gfx::Size(params->width, params->height), VK_FORMAT_R8G8B8A8_UNORM,
0 /* usage */);
if (!vulkan_image) {
LOG(ERROR) << "Could not create VkImage from AHB.";
return;
}
// Create backend texture from the VkImage.
GrVkAlloc alloc = {vk_device_memory, 0, mem_allocation_size, 0};
pending_draw->image_info = {vk_image,
alloc,
vk_image_info.tiling,
vk_image_info.initialLayout,
vk_image_info.format,
vk_image_info.mipLevels,
VK_QUEUE_FAMILY_EXTERNAL};
GrVkAlloc alloc(vulkan_image->device_memory(), 0 /* offset */,
vulkan_image->device_size(), 0 /* flags */);
pending_draw->image_info = GrVkImageInfo(
vulkan_image->image(), alloc, vulkan_image->image_tiling(),
VK_IMAGE_LAYOUT_UNDEFINED, vulkan_image->format(), 1 /* levelCount */,
VK_QUEUE_FAMILY_EXTERNAL);
pending_draw->vulkan_image = std::move(vulkan_image);
}
// Create an SkImage from AHB.
......@@ -655,11 +654,10 @@ AwDrawFnImpl::InFlightInteropDraw::~InFlightInteropDraw() {
glDeleteTextures(1, &texture_id);
if (framebuffer_id)
glDeleteFramebuffersEXT(1, &framebuffer_id);
if (image_info.fImage != VK_NULL_HANDLE) {
if (vulkan_image) {
vk_context_provider->GetDeviceQueue()
->GetFenceHelper()
->EnqueueImageCleanupForSubmittedWork(image_info.fImage,
image_info.fAlloc.fMemory);
->EnqueueVulkanObjectCleanupForSubmittedWork(std::move(vulkan_image));
}
}
......
......@@ -25,6 +25,10 @@ namespace gl {
class GLImageAHardwareBuffer;
}
namespace gpu {
class VulkanImage;
}
namespace android_webview {
class GLNonOwnedCompatibilityContext;
......@@ -74,6 +78,7 @@ class AwDrawFnImpl {
sk_sp<SkImage> ahb_skimage;
uint32_t texture_id = 0;
uint32_t framebuffer_id = 0;
std::unique_ptr<gpu::VulkanImage> vulkan_image;
GrVkImageInfo image_info;
// Used to clean up Vulkan objects.
......
......@@ -38,6 +38,7 @@
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_fence_helper.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "gpu/vulkan/vulkan_image.h"
#include "gpu/vulkan/vulkan_implementation.h"
#include "gpu/vulkan/vulkan_util.h"
#include "third_party/skia/include/core/SkPromiseImageTexture.h"
......@@ -56,66 +57,6 @@
namespace gpu {
namespace {
sk_sp<SkPromiseImageTexture> CreatePromiseTextureAHB(
viz::VulkanContextProvider* context_provider,
base::android::ScopedHardwareBufferHandle ahb_handle,
gfx::Size size,
viz::ResourceFormat format) {
VulkanImplementation* vk_implementation =
context_provider->GetVulkanImplementation();
VkDevice vk_device = context_provider->GetDeviceQueue()->GetVulkanDevice();
VkPhysicalDevice vk_physical_device =
context_provider->GetDeviceQueue()->GetVulkanPhysicalDevice();
// Create a VkImage and import AHB.
VkImage vk_image;
VkImageCreateInfo vk_image_info;
VkDeviceMemory vk_device_memory;
VkDeviceSize mem_allocation_size;
if (!vk_implementation->CreateVkImageAndImportAHB(
vk_device, vk_physical_device, size, std::move(ahb_handle), &vk_image,
&vk_image_info, &vk_device_memory, &mem_allocation_size)) {
return nullptr;
}
// Create backend texture from the VkImage.
GrVkAlloc alloc = {vk_device_memory, 0, mem_allocation_size, 0};
GrVkImageInfo vk_info = {vk_image,
alloc,
vk_image_info.tiling,
vk_image_info.initialLayout,
vk_image_info.format,
vk_image_info.mipLevels,
VK_QUEUE_FAMILY_EXTERNAL};
// TODO(bsalomon): Determine whether it makes sense to attempt to reuse this
// if the vk_info stays the same on subsequent calls.
auto promise_texture = SkPromiseImageTexture::Make(
GrBackendTexture(size.width(), size.height(), vk_info));
if (!promise_texture) {
vkDestroyImage(vk_device, vk_image, nullptr);
vkFreeMemory(vk_device, vk_device_memory, nullptr);
return nullptr;
}
return promise_texture;
}
void DestroyVkPromiseTextureAHB(viz::VulkanContextProvider* context_provider,
sk_sp<SkPromiseImageTexture> promise_texture) {
DCHECK(promise_texture);
DCHECK(promise_texture->unique());
GrVkImageInfo vk_image_info;
bool result =
promise_texture->backendTexture().getVkImageInfo(&vk_image_info);
DCHECK(result);
VulkanFenceHelper* fence_helper =
context_provider->GetDeviceQueue()->GetFenceHelper();
fence_helper->EnqueueImageCleanupForSubmittedWork(
vk_image_info.fImage, vk_image_info.fAlloc.fMemory);
}
class OverlayImage final : public gl::GLImage {
public:
explicit OverlayImage(AHardwareBuffer* buffer)
......@@ -328,21 +269,38 @@ class SharedImageRepresentationSkiaVkAHB
SharedImageManager* manager,
SharedImageBacking* backing,
scoped_refptr<SharedContextState> context_state,
sk_sp<SkPromiseImageTexture> promise_texture,
std::unique_ptr<VulkanImage> vulkan_image,
MemoryTypeTracker* tracker)
: SharedImageRepresentationSkia(manager, backing, tracker),
promise_texture_(std::move(promise_texture)),
vulkan_image_(std::move(vulkan_image)),
context_state_(std::move(context_state)) {
DCHECK(promise_texture_);
DCHECK(vulkan_image_);
DCHECK(context_state_);
DCHECK(context_state_->vk_context_provider());
// Create backend texture from the VkImage.
GrVkAlloc alloc(vulkan_image_->device_memory(), 0 /* offset */,
vulkan_image_->device_size(), 0 /* flags */);
GrVkImageInfo vk_info(vulkan_image_->image(), alloc,
vulkan_image_->image_tiling(),
VK_IMAGE_LAYOUT_UNDEFINED, vulkan_image_->format(),
1 /* levelCount */, VK_QUEUE_FAMILY_EXTERNAL);
// TODO(bsalomon): Determine whether it makes sense to attempt to reuse this
// if the vk_info stays the same on subsequent calls.
promise_texture_ = SkPromiseImageTexture::Make(
GrBackendTexture(size().width(), size().height(), vk_info));
DCHECK(promise_texture_);
}
~SharedImageRepresentationSkiaVkAHB() override {
DestroyVkPromiseTextureAHB(context_state_->vk_context_provider(),
std::move(promise_texture_));
DCHECK_EQ(mode_, RepresentationAccessMode::kNone);
DCHECK(!surface_);
DCHECK(vulkan_image_);
VulkanFenceHelper* fence_helper = context_state_->vk_context_provider()
->GetDeviceQueue()
->GetFenceHelper();
fence_helper->EnqueueVulkanObjectCleanupForSubmittedWork(
std::move(vulkan_image_));
}
sk_sp<SkSurface> BeginWriteAccess(
......@@ -495,6 +453,7 @@ class SharedImageRepresentationSkiaVkAHB
mode_ = RepresentationAccessMode::kNone;
}
std::unique_ptr<VulkanImage> vulkan_image_;
sk_sp<SkPromiseImageTexture> promise_texture_;
RepresentationAccessMode mode_ = RepresentationAccessMode::kNone;
SkSurface* surface_ = nullptr;
......@@ -650,12 +609,16 @@ SharedImageBackingAHB::ProduceSkia(
// Check whether we are in Vulkan mode OR GL mode and accordingly create
// Skia representation.
if (context_state->GrContextIsVulkan()) {
sk_sp<SkPromiseImageTexture> promise_texture = CreatePromiseTextureAHB(
context_state->vk_context_provider(), GetAhbHandle(), size(), format());
if (!promise_texture)
auto* device_queue = context_state->vk_context_provider()->GetDeviceQueue();
gfx::GpuMemoryBufferHandle gmb_handle(GetAhbHandle());
auto vulkan_image = VulkanImage::CreateFromGpuMemoryBufferHandle(
device_queue, std::move(gmb_handle), size(), ToVkFormat(format()),
0 /* usage */);
if (!vulkan_image)
return nullptr;
return std::make_unique<SharedImageRepresentationSkiaVkAHB>(
manager, this, std::move(context_state), std::move(promise_texture),
manager, this, std::move(context_state), std::move(vulkan_image),
tracker);
}
DCHECK(context_state->GrContextIsGL());
......
......@@ -24,6 +24,7 @@
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_fence_helper.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "gpu/vulkan/vulkan_image.h"
#include "gpu/vulkan/vulkan_implementation.h"
#include "gpu/vulkan/vulkan_util.h"
#include "third_party/skia/include/core/SkPromiseImageTexture.h"
......@@ -32,88 +33,6 @@
namespace gpu {
namespace {
sk_sp<SkPromiseImageTexture> CreatePromiseTextureVideo(
viz::VulkanContextProvider* context_provider,
base::android::ScopedHardwareBufferHandle ahb_handle,
gfx::Size size,
viz::ResourceFormat format) {
VulkanImplementation* vk_implementation =
context_provider->GetVulkanImplementation();
VkDevice vk_device = context_provider->GetDeviceQueue()->GetVulkanDevice();
VkPhysicalDevice vk_physical_device =
context_provider->GetDeviceQueue()->GetVulkanPhysicalDevice();
// Create a VkImage and import AHB.
VkImage vk_image;
VkImageCreateInfo vk_image_info;
VkDeviceMemory vk_device_memory;
VkDeviceSize mem_allocation_size;
VulkanYCbCrInfo ycbcr_info;
if (!vk_implementation->CreateVkImageAndImportAHB(
vk_device, vk_physical_device, size, std::move(ahb_handle), &vk_image,
&vk_image_info, &vk_device_memory, &mem_allocation_size,
&ycbcr_info)) {
return nullptr;
}
// We always use VK_IMAGE_TILING_OPTIMAL while creating the vk image in
// VulkanImplementationAndroid::CreateVkImageAndImportAHB. Hence pass the
// tiling parameter as VK_IMAGE_TILING_OPTIMAL to below call rather than
// passing |vk_image_info.tiling|. This is also to ensure that the promise
// image created here at [1] as well the fullfil image created via the current
// function call are consistent and both are using VK_IMAGE_TILING_OPTIMAL.
// [1] -
// https://cs.chromium.org/chromium/src/components/viz/service/display_embedder/skia_output_surface_impl.cc?rcl=db5ffd448ba5d66d9d3c5c099754e5067c752465&l=789.
DCHECK_EQ(static_cast<int32_t>(vk_image_info.tiling),
static_cast<int32_t>(VK_IMAGE_TILING_OPTIMAL));
GrVkYcbcrConversionInfo gr_ycbcr_info = CreateGrVkYcbcrConversionInfo(
vk_physical_device, VK_IMAGE_TILING_OPTIMAL, ycbcr_info);
// Create backend texture from the VkImage.
GrVkAlloc alloc = {vk_device_memory, 0, mem_allocation_size, 0};
GrVkImageInfo vk_info = {vk_image,
alloc,
vk_image_info.tiling,
vk_image_info.initialLayout,
vk_image_info.format,
vk_image_info.mipLevels,
VK_QUEUE_FAMILY_EXTERNAL,
GrProtected::kNo,
gr_ycbcr_info};
// TODO(bsalomon): Determine whether it makes sense to attempt to reuse this
// if the vk_info stays the same on subsequent calls.
auto promise_texture = SkPromiseImageTexture::Make(
GrBackendTexture(size.width(), size.height(), vk_info));
if (!promise_texture) {
vkDestroyImage(vk_device, vk_image, nullptr);
vkFreeMemory(vk_device, vk_device_memory, nullptr);
return nullptr;
}
return promise_texture;
}
void DestroyVkPromiseTextureVideo(
viz::VulkanContextProvider* context_provider,
sk_sp<SkPromiseImageTexture> promise_texture) {
DCHECK(promise_texture);
DCHECK(promise_texture->unique());
GrVkImageInfo vk_image_info;
bool result =
promise_texture->backendTexture().getVkImageInfo(&vk_image_info);
DCHECK(result);
VulkanFenceHelper* fence_helper =
context_provider->GetDeviceQueue()->GetFenceHelper();
fence_helper->EnqueueImageCleanupForSubmittedWork(
vk_image_info.fImage, vk_image_info.fAlloc.fMemory);
}
} // namespace
SharedImageVideo::SharedImageVideo(
const Mailbox& mailbox,
const gfx::Size& size,
......@@ -299,10 +218,13 @@ class SharedImageRepresentationVideoSkiaVk
DCHECK(end_access_semaphore_ == VK_NULL_HANDLE);
// |promise_texture_| could be null if we never being read.
if (!promise_texture_)
if (!vulkan_image_)
return;
DestroyVkPromiseTextureVideo(context_state_->vk_context_provider(),
std::move(promise_texture_));
VulkanFenceHelper* fence_helper = context_state_->vk_context_provider()
->GetDeviceQueue()
->GetFenceHelper();
fence_helper->EnqueueVulkanObjectCleanupForSubmittedWork(
std::move(vulkan_image_));
}
sk_sp<SkSurface> BeginWriteAccess(
......@@ -342,11 +264,46 @@ class SharedImageRepresentationVideoSkiaVk
return nullptr;
}
if (!promise_texture_) {
// Create the promise texture.
promise_texture_ = CreatePromiseTextureVideo(
context_state_->vk_context_provider(),
scoped_hardware_buffer_->TakeBuffer(), size(), format());
if (!vulkan_image_) {
DCHECK(!promise_texture_);
gfx::GpuMemoryBufferHandle gmb_handle(
scoped_hardware_buffer_->TakeBuffer());
auto* device_queue =
context_state_->vk_context_provider()->GetDeviceQueue();
vulkan_image_ = VulkanImage::CreateFromGpuMemoryBufferHandle(
device_queue, std::move(gmb_handle), size(), ToVkFormat(format()),
0 /* usage */);
if (!vulkan_image_)
return nullptr;
// We always use VK_IMAGE_TILING_OPTIMAL while creating the vk image in
// VulkanImplementationAndroid::CreateVkImageAndImportAHB. Hence pass the
// tiling parameter as VK_IMAGE_TILING_OPTIMAL to below call rather than
// passing |vk_image_info.tiling|. This is also to ensure that the promise
// image created here at [1] as well the fullfil image created via the
// current function call are consistent and both are using
// VK_IMAGE_TILING_OPTIMAL. [1] -
// https://cs.chromium.org/chromium/src/components/viz/service/display_embedder/skia_output_surface_impl.cc?rcl=db5ffd448ba5d66d9d3c5c099754e5067c752465&l=789.
DCHECK_EQ(static_cast<int32_t>(vulkan_image_->image_tiling()),
static_cast<int32_t>(VK_IMAGE_TILING_OPTIMAL));
GrVkYcbcrConversionInfo gr_ycbcr_info = CreateGrVkYcbcrConversionInfo(
device_queue->GetVulkanPhysicalDevice(), VK_IMAGE_TILING_OPTIMAL,
vulkan_image_->ycbcr_info());
// Create backend texture from the VkImage.
GrVkAlloc alloc(vulkan_image_->device_memory(), 0 /* offset */,
vulkan_image_->device_size(), 0 /* flags */);
GrVkImageInfo vk_info(vulkan_image_->image(), alloc,
vulkan_image_->image_tiling(),
VK_IMAGE_LAYOUT_UNDEFINED, vulkan_image_->format(),
1 /* levelCount */, VK_QUEUE_FAMILY_EXTERNAL,
GrProtected::kNo, gr_ycbcr_info);
// TODO(bsalomon): Determine whether it makes sense to attempt to reuse
// this if the vk_info stays the same on subsequent calls.
promise_texture_ = SkPromiseImageTexture::Make(
GrBackendTexture(size().width(), size().height(), vk_info));
DCHECK(promise_texture_);
}
return promise_texture_;
}
......@@ -427,6 +384,7 @@ class SharedImageRepresentationVideoSkiaVk
->GetFenceHelper();
}
std::unique_ptr<VulkanImage> vulkan_image_;
sk_sp<SkPromiseImageTexture> promise_texture_;
scoped_refptr<SharedContextState> context_state_;
std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
......
......@@ -9,6 +9,7 @@
#include "components/viz/common/gpu/vulkan_in_process_context_provider.h"
#include "gpu/vulkan/android/vulkan_implementation_android.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "gpu/vulkan/vulkan_image.h"
#include "gpu/vulkan/vulkan_util.h"
#include "testing/gtest/include/gtest/gtest.h"
......@@ -127,18 +128,15 @@ TEST_F(VulkanImplementationAndroidTest, CreateVkImageFromAHB) {
// Create a vkimage and import the AHB into it.
const gfx::Size size(hwb_desc.width, hwb_desc.height);
VkImage vk_image;
VkImageCreateInfo vk_image_info;
VkDeviceMemory vk_device_memory;
VkDeviceSize mem_allocation_size;
EXPECT_TRUE(vk_implementation_->CreateVkImageAndImportAHB(
vk_device_, vk_phy_device_, size,
base::android::ScopedHardwareBufferHandle::Adopt(buffer), &vk_image,
&vk_image_info, &vk_device_memory, &mem_allocation_size));
// Free up resources.
vkDestroyImage(vk_device_, vk_image, nullptr);
vkFreeMemory(vk_device_, vk_device_memory, nullptr);
auto* device_queue = vk_context_provider_->GetDeviceQueue();
auto handle = base::android::ScopedHardwareBufferHandle::Adopt(buffer);
gfx::GpuMemoryBufferHandle gmb_handle(std::move(handle));
auto vulkan_image = VulkanImage::CreateFromGpuMemoryBufferHandle(
device_queue, std::move(gmb_handle), size, VK_FORMAT_R8G8B8A8_UNORM,
0 /* usage */);
EXPECT_TRUE(vulkan_image);
vulkan_image->Destroy();
}
} // namespace gpu
......@@ -49,16 +49,6 @@ class COMPONENT_EXPORT(VULKAN_ANDROID) VulkanImplementationAndroid
gfx::GpuMemoryBufferHandle gmb_handle,
gfx::Size size,
VkFormat vk_formae) override;
bool CreateVkImageAndImportAHB(
const VkDevice& vk_device,
const VkPhysicalDevice& vk_physical_device,
const gfx::Size& size,
base::android::ScopedHardwareBufferHandle ahb_handle,
VkImage* vk_image,
VkImageCreateInfo* vk_image_info,
VkDeviceMemory* vk_device_memory,
VkDeviceSize* mem_allocation_size,
VulkanYCbCrInfo* ycbcr_info) override;
bool GetSamplerYcbcrConversionInfo(
const VkDevice& vk_device,
base::android::ScopedHardwareBufferHandle ahb_handle,
......
......@@ -19,19 +19,23 @@
#include "gpu/vulkan/fuchsia/vulkan_fuchsia_ext.h"
#endif
#if defined(OS_ANDROID)
#include "base/android/android_hardware_buffer_compat.h"
#endif
namespace gpu {
namespace {
base::Optional<uint32_t> FindMemoryTypeIndex(
VkPhysicalDevice physical_device,
const VkMemoryRequirements& requirements,
const VkMemoryRequirements* requirements,
VkMemoryPropertyFlags flags) {
VkPhysicalDeviceMemoryProperties properties;
vkGetPhysicalDeviceMemoryProperties(physical_device, &properties);
constexpr uint32_t kMaxIndex = 31;
for (uint32_t i = 0; i <= kMaxIndex; i++) {
if (((1u << i) & requirements.memoryTypeBits) == 0)
if (((1u << i) & requirements->memoryTypeBits) == 0)
continue;
if ((properties.memoryTypes[i].propertyFlags & flags) != flags)
continue;
......@@ -56,7 +60,8 @@ std::unique_ptr<VulkanImage> VulkanImage::Create(
auto image = std::make_unique<VulkanImage>(util::PassKey<VulkanImage>());
if (!image->Initialize(device_queue, size, format, usage, flags, image_tiling,
vk_image_create_info_next,
vk_memory_allocation_info_next)) {
vk_memory_allocation_info_next,
nullptr /* requirements */)) {
return nullptr;
}
return image;
......@@ -78,6 +83,24 @@ std::unique_ptr<VulkanImage> VulkanImage::CreateWithExternalMemory(
return image;
}
// static
std::unique_ptr<VulkanImage> VulkanImage::CreateFromGpuMemoryBufferHandle(
VulkanDeviceQueue* device_queue,
gfx::GpuMemoryBufferHandle gmb_handle,
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageTiling image_tiling) {
auto image = std::make_unique<VulkanImage>(util::PassKey<VulkanImage>());
if (!image->InitializeFromGpuMemoryBufferHandle(
device_queue, std::move(gmb_handle), size, format, usage, flags,
image_tiling)) {
return nullptr;
}
return image;
}
// static
std::unique_ptr<VulkanImage> VulkanImage::Create(
VulkanDeviceQueue* device_queue,
......@@ -177,7 +200,8 @@ bool VulkanImage::Initialize(VulkanDeviceQueue* device_queue,
VkImageCreateFlags flags,
VkImageTiling image_tiling,
void* vk_image_create_info_next,
void* vk_memory_allocation_info_next) {
void* vk_memory_allocation_info_next,
const VkMemoryRequirements* requirements) {
DCHECK(!device_queue_);
DCHECK(image_ == VK_NULL_HANDLE);
DCHECK(device_memory_ == VK_NULL_HANDLE);
......@@ -213,15 +237,18 @@ bool VulkanImage::Initialize(VulkanDeviceQueue* device_queue,
return false;
}
VkMemoryRequirements requirements;
vkGetImageMemoryRequirements(vk_device, image_, &requirements);
if (!requirements.memoryTypeBits) {
VkMemoryRequirements tmp_requirements;
if (!requirements) {
vkGetImageMemoryRequirements(vk_device, image_, &tmp_requirements);
if (!tmp_requirements.memoryTypeBits) {
DLOG(ERROR) << "vkGetImageMemoryRequirements failed";
Destroy();
return false;
}
requirements = &tmp_requirements;
}
device_size_ = requirements.size;
device_size_ = requirements->size;
// Some vulkan implementations require dedicated memory for sharing memory
// object between vulkan instances.
......@@ -336,8 +363,139 @@ bool VulkanImage::InitializeWithExternalMemory(VulkanDeviceQueue* device_queue,
};
return Initialize(device_queue, size, format, usage, flags, image_tiling,
&external_image_create_info,
&external_memory_allocate_info);
} // namespace gpu
&external_image_create_info, &external_memory_allocate_info,
nullptr /* requirements */);
}
bool VulkanImage::InitializeFromGpuMemoryBufferHandle(
VulkanDeviceQueue* device_queue,
gfx::GpuMemoryBufferHandle gmb_handle,
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageTiling image_tiling) {
#if !defined(OS_ANDROID)
NOTIMPLEMENTED();
return false;
#else
// TODO(penghuang): Move this method to vulkan_image_android.cc when more
// platforms are supported.
if (gmb_handle.type != gfx::GpuMemoryBufferType::ANDROID_HARDWARE_BUFFER) {
DLOG(ERROR) << "gmb_handle.type is not supported. type:" << gmb_handle.type;
return false;
}
DCHECK(gmb_handle.android_hardware_buffer.is_valid());
auto& ahb_handle = gmb_handle.android_hardware_buffer;
// To obtain format properties of an Android hardware buffer, include an
// instance of VkAndroidHardwareBufferFormatPropertiesANDROID in the pNext
// chain of the VkAndroidHardwareBufferPropertiesANDROID instance passed to
// vkGetAndroidHardwareBufferPropertiesANDROID.
VkAndroidHardwareBufferFormatPropertiesANDROID ahb_format_props = {
VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
};
VkAndroidHardwareBufferPropertiesANDROID ahb_props = {
.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
.pNext = &ahb_format_props,
};
VkDevice vk_device = device_queue->GetVulkanDevice();
VkResult result = vkGetAndroidHardwareBufferPropertiesANDROID(
vk_device, ahb_handle.get(), &ahb_props);
if (result != VK_SUCCESS) {
LOG(ERROR)
<< "GetAhbProps: vkGetAndroidHardwareBufferPropertiesANDROID failed : "
<< result;
return false;
}
// To create an image with an external format, include an instance of
// VkExternalFormatANDROID in the pNext chain of VkImageCreateInfo.
VkExternalFormatANDROID external_format = {
.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
// If externalFormat is zero, the effect is as if the
// VkExternalFormatANDROID structure was not present. Otherwise, the image
// will have the specified external format.
.externalFormat = 0,
};
// If image has an external format, format must be VK_FORMAT_UNDEFINED.
if (ahb_format_props.format == VK_FORMAT_UNDEFINED) {
// externalFormat must be 0 or a value returned in the externalFormat member
// of VkAndroidHardwareBufferFormatPropertiesANDROID by an earlier call to
// vkGetAndroidHardwareBufferPropertiesANDROID.
external_format.externalFormat = ahb_format_props.externalFormat;
}
// To define a set of external memory handle types that may be used as backing
// store for an image, add a VkExternalMemoryImageCreateInfo structure to the
// pNext chain of the VkImageCreateInfo structure.
VkExternalMemoryImageCreateInfo external_memory_image_info = {
.sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
.pNext = &external_format,
.handleTypes =
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
};
// Get the AHB description.
AHardwareBuffer_Desc ahb_desc = {};
base::AndroidHardwareBufferCompat::GetInstance().Describe(ahb_handle.get(),
&ahb_desc);
// Intended usage of the image.
VkImageUsageFlags usage_flags = 0;
// Get Vulkan Image usage flag equivalence of AHB usage.
if (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE) {
usage_flags = usage_flags | VK_IMAGE_USAGE_SAMPLED_BIT |
VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
}
if (ahb_desc.usage & AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT) {
usage_flags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
}
// TODO(vikassoni) : AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP is supported from API
// level 28 which is not part of current android_ndk version in chromium. Add
// equivalent VK usage later.
if (!usage_flags) {
LOG(ERROR) << "No valid usage flags found";
return false;
}
VkImageCreateFlags create_flags = 0;
if (ahb_desc.usage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT) {
create_flags = VK_IMAGE_CREATE_PROTECTED_BIT;
}
// To import memory created outside of the current Vulkan instance from an
// Android hardware buffer, add a VkImportAndroidHardwareBufferInfoANDROID
// structure to the pNext chain of the VkMemoryAllocateInfo structure.
VkImportAndroidHardwareBufferInfoANDROID ahb_import_info = {
.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
.buffer = ahb_handle.get(),
};
VkMemoryRequirements requirements = {
.size = ahb_props.allocationSize,
.memoryTypeBits = ahb_props.memoryTypeBits,
};
if (!Initialize(device_queue, size, ahb_format_props.format, usage_flags,
create_flags, VK_IMAGE_TILING_OPTIMAL,
&external_memory_image_info, &ahb_import_info,
&requirements)) {
return false;
}
if (ahb_format_props.format == VK_FORMAT_UNDEFINED) {
ycbcr_info_.emplace(VK_FORMAT_UNDEFINED, ahb_format_props.externalFormat,
ahb_format_props.suggestedYcbcrModel,
ahb_format_props.suggestedYcbcrRange,
ahb_format_props.suggestedXChromaOffset,
ahb_format_props.suggestedYChromaOffset,
ahb_format_props.formatFeatures);
}
return true;
#endif // defined(OS_ANDROID)
}
} // namespace gpu
......@@ -14,6 +14,7 @@
#include "gpu/ipc/common/vulkan_ycbcr_info.h"
#include "gpu/vulkan/vulkan_export.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
#if defined(OS_FUCHSIA)
#include <lib/zx/vmo.h>
......@@ -48,8 +49,17 @@ class VULKAN_EXPORT VulkanImage {
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageTiling image_tiling);
VkImageCreateFlags flags = 0,
VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL);
static std::unique_ptr<VulkanImage> CreateFromGpuMemoryBufferHandle(
VulkanDeviceQueue* device_queue,
gfx::GpuMemoryBufferHandle gmb_handle,
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags = 0,
VkImageTiling image_tiling = VK_IMAGE_TILING_OPTIMAL);
static std::unique_ptr<VulkanImage> Create(
VulkanDeviceQueue* device_queue,
......@@ -93,13 +103,22 @@ class VULKAN_EXPORT VulkanImage {
VkImageCreateFlags flags,
VkImageTiling image_tiling,
void* image_create_info_next,
void* memory_allocation_info_next);
void* memory_allocation_info_next,
const VkMemoryRequirements* requirements);
bool InitializeWithExternalMemory(VulkanDeviceQueue* device_queue,
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageTiling image_tiling);
bool InitializeFromGpuMemoryBufferHandle(
VulkanDeviceQueue* device_queue,
gfx::GpuMemoryBufferHandle gmb_handle,
const gfx::Size& size,
VkFormat format,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageTiling image_tiling);
VulkanDeviceQueue* device_queue_ = nullptr;
gfx::Size size_;
......
......@@ -117,21 +117,6 @@ class VULKAN_EXPORT VulkanImplementation {
VkFormat vk_formae) = 0;
#if defined(OS_ANDROID)
// Create a VkImage, import Android AHardwareBuffer object created outside of
// the Vulkan device into Vulkan memory object and bind it to the VkImage.
// TODO(sergeyu): Remove this method and use
// CreateVkImageFromGpuMemoryHandle() instead.
virtual bool CreateVkImageAndImportAHB(
const VkDevice& vk_device,
const VkPhysicalDevice& vk_physical_device,
const gfx::Size& size,
base::android::ScopedHardwareBufferHandle ahb_handle,
VkImage* vk_image,
VkImageCreateInfo* vk_image_info,
VkDeviceMemory* vk_device_memory,
VkDeviceSize* mem_allocation_size,
VulkanYCbCrInfo* ycbcr_info = nullptr) = 0;
// Get the sampler ycbcr conversion information from the AHB.
virtual bool GetSamplerYcbcrConversionInfo(
const VkDevice& vk_device,
......
......@@ -8,8 +8,14 @@
namespace gfx {
GpuMemoryBufferHandle::GpuMemoryBufferHandle()
: type(EMPTY_BUFFER), id(0), offset(0), stride(0) {}
GpuMemoryBufferHandle::GpuMemoryBufferHandle() = default;
#if defined(OS_ANDROID)
GpuMemoryBufferHandle::GpuMemoryBufferHandle(
base::android::ScopedHardwareBufferHandle handle)
: type(GpuMemoryBufferType::ANDROID_HARDWARE_BUFFER),
android_hardware_buffer(std::move(handle)) {}
#endif
// TODO(crbug.com/863011): Reset |type| and possibly the handles on the
// moved-from object.
......
......@@ -55,16 +55,20 @@ using GpuMemoryBufferId = GenericSharedMemoryId;
// time and it corresponds to |type|.
struct GFX_EXPORT GpuMemoryBufferHandle {
GpuMemoryBufferHandle();
#if defined(OS_ANDROID)
explicit GpuMemoryBufferHandle(
base::android::ScopedHardwareBufferHandle handle);
#endif
GpuMemoryBufferHandle(GpuMemoryBufferHandle&& other);
GpuMemoryBufferHandle& operator=(GpuMemoryBufferHandle&& other);
~GpuMemoryBufferHandle();
GpuMemoryBufferHandle Clone() const;
bool is_null() const { return type == EMPTY_BUFFER; }
GpuMemoryBufferType type;
GpuMemoryBufferId id;
GpuMemoryBufferType type = GpuMemoryBufferType::EMPTY_BUFFER;
GpuMemoryBufferId id{0};
base::UnsafeSharedMemoryRegion region;
uint32_t offset;
int32_t stride;
uint32_t offset = 0;
int32_t stride = 0;
#if defined(OS_LINUX) || defined(OS_FUCHSIA)
NativePixmapHandle native_pixmap_handle;
#elif defined(OS_MACOSX) && !defined(OS_IOS)
......
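
As a usage note (not part of the commit), the new Android-only GpuMemoryBufferHandle constructor added above lets call sites build the handle in a single expression; |ahb| here is a hypothetical AHardwareBuffer* the caller already holds:

    // Sketch only: the explicit constructor sets |type| to
    // ANDROID_HARDWARE_BUFFER and stores the scoped handle, so the two-step
    // construction seen earlier in this diff can also be written as:
    gfx::GpuMemoryBufferHandle gmb_handle(
        base::android::ScopedHardwareBufferHandle::Create(ahb));
    DCHECK(!gmb_handle.is_null());
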