Commit 3913758d authored by Peng Huang, committed by Commit Bot

Remove not used code from //gpu/vulkan

Chrome will use Skia instead of calling the Vulkan API directly, so some of
this code will no longer be used; remove it.

Bug: 876825
Cq-Include-Trybots: luci.chromium.try:android_optional_gpu_tests_rel;luci.chromium.try:linux_optional_gpu_tests_rel;luci.chromium.try:mac_optional_gpu_tests_rel;luci.chromium.try:win_optional_gpu_tests_rel
Change-Id: Ia572cfa0937a3d09e139f88df22cff85cfb67abb
Reviewed-on: https://chromium-review.googlesource.com/1204921
Commit-Queue: Peng Huang <penghuang@chromium.org>
Reviewed-by: Antoine Labour <piman@chromium.org>
Cr-Commit-Position: refs/heads/master@{#589928}
parent 06cf8562
...@@ -22,29 +22,14 @@ if (enable_vulkan) {
"vulkan_command_buffer.h",
"vulkan_command_pool.cc",
"vulkan_command_pool.h",
"vulkan_descriptor_layout.cc",
"vulkan_descriptor_layout.h",
"vulkan_descriptor_pool.cc",
"vulkan_descriptor_pool.h",
"vulkan_descriptor_set.cc",
"vulkan_descriptor_set.h",
"vulkan_device_queue.cc", "vulkan_device_queue.cc",
"vulkan_device_queue.h", "vulkan_device_queue.h",
"vulkan_export.h", "vulkan_export.h",
"vulkan_function_pointers.cc", "vulkan_function_pointers.cc",
"vulkan_function_pointers.h", "vulkan_function_pointers.h",
"vulkan_image_view.cc",
"vulkan_image_view.h",
"vulkan_implementation.cc", "vulkan_implementation.cc",
"vulkan_implementation.h", "vulkan_implementation.h",
"vulkan_instance.cc", "vulkan_instance.cc",
"vulkan_instance.h",
"vulkan_render_pass.cc",
"vulkan_render_pass.h",
"vulkan_sampler.cc",
"vulkan_sampler.h",
"vulkan_shader_module.cc",
"vulkan_shader_module.h",
"vulkan_surface.cc", "vulkan_surface.cc",
"vulkan_surface.h", "vulkan_surface.h",
"vulkan_swap_chain.cc", "vulkan_swap_chain.cc",
......
...@@ -6,7 +6,6 @@
#include "gpu/vulkan/tests/basic_vulkan_test.h"
#include "gpu/vulkan/vulkan_command_buffer.h"
#include "gpu/vulkan/vulkan_render_pass.h"
#include "gpu/vulkan/vulkan_surface.h" #include "gpu/vulkan/vulkan_surface.h"
#include "gpu/vulkan/vulkan_swap_chain.h" #include "gpu/vulkan/vulkan_swap_chain.h"
...@@ -39,64 +38,4 @@ TEST_F(BasicVulkanTest, EmptyVulkanSwaps) { ...@@ -39,64 +38,4 @@ TEST_F(BasicVulkanTest, EmptyVulkanSwaps) {
surface->Destroy();
}
TEST_F(BasicVulkanTest, BasicRenderPass) {
std::unique_ptr<VulkanSurface> surface = CreateViewSurface(window());
ASSERT_TRUE(surface);
ASSERT_TRUE(surface->Initialize(GetDeviceQueue(),
VulkanSurface::DEFAULT_SURFACE_FORMAT));
VulkanSwapChain* swap_chain = surface->GetSwapChain();
VulkanRenderPass::RenderPassData render_pass_data;
// There is a single attachment which transitions present -> color -> present.
render_pass_data.attachments.resize(1);
VulkanRenderPass::AttachmentData* attachment =
&render_pass_data.attachments[0];
attachment->attachment_type =
VulkanRenderPass::AttachmentType::ATTACHMENT_TYPE_SWAP_IMAGE;
attachment->sample_count = VK_SAMPLE_COUNT_1_BIT;
attachment->load_op = VK_ATTACHMENT_LOAD_OP_LOAD;
attachment->store_op = VK_ATTACHMENT_STORE_OP_STORE;
attachment->stencil_load_op = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachment->stencil_store_op = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachment->start_layout =
VulkanRenderPass::ImageLayoutType::IMAGE_LAYOUT_TYPE_PRESENT;
attachment->end_layout =
VulkanRenderPass::ImageLayoutType::IMAGE_LAYOUT_TYPE_PRESENT;
// Single subpass.
render_pass_data.subpass_datas.resize(1);
VulkanRenderPass::SubpassData* subpass_data =
&render_pass_data.subpass_datas[0];
// Our subpass will handle the transition to Color.
subpass_data->subpass_attachments.resize(1);
VulkanRenderPass::SubpassAttachment* subpass_attachment =
&subpass_data->subpass_attachments[0];
subpass_attachment->attachment_index = 0;
subpass_attachment->subpass_layout =
VulkanRenderPass::ImageLayoutType::IMAGE_LAYOUT_TYPE_IMAGE_VIEW;
ASSERT_TRUE(render_pass_data.ValidateData(swap_chain));
VulkanRenderPass render_pass(GetDeviceQueue());
EXPECT_TRUE(render_pass.Initialize(swap_chain, render_pass_data));
for (int i = 0; i < 10; ++i) {
VulkanCommandBuffer* command_buffer = swap_chain->GetCurrentCommandBuffer();
{
ScopedSingleUseCommandBufferRecorder recorder(*command_buffer);
render_pass.BeginRenderPass(recorder, true);
render_pass.EndRenderPass(recorder);
}
EXPECT_EQ(gfx::SwapResult::SWAP_ACK, surface->SwapBuffers());
}
surface->Finish();
render_pass.Destroy();
surface->Destroy();
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_descriptor_layout.h"
#include "base/logging.h"
#include "gpu/vulkan/vulkan_descriptor_pool.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
VulkanDescriptorLayout::VulkanDescriptorLayout(VulkanDeviceQueue* device_queue)
: device_queue_(device_queue) {}
VulkanDescriptorLayout::~VulkanDescriptorLayout() {
DCHECK_EQ(static_cast<VkDescriptorSetLayout>(VK_NULL_HANDLE), handle_);
}
bool VulkanDescriptorLayout::Initialize(
const std::vector<VkDescriptorSetLayoutBinding>& layout) {
VkResult result = VK_SUCCESS;
VkDevice device = device_queue_->GetVulkanDevice();
VkDescriptorSetLayoutCreateInfo layout_create_info = {};
layout_create_info.sType =
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
layout_create_info.bindingCount = static_cast<uint32_t>(layout.size());
layout_create_info.pBindings = layout.data();
result = vkCreateDescriptorSetLayout(device, &layout_create_info, nullptr,
&handle_);
if (VK_SUCCESS != result) {
DLOG(ERROR) << "vkCreateDescriptorSetLayout() failed: " << result;
return false;
}
return true;
}
void VulkanDescriptorLayout::Destroy() {
if (VK_NULL_HANDLE != handle_) {
vkDestroyDescriptorSetLayout(device_queue_->GetVulkanDevice(), handle_,
nullptr);
handle_ = VK_NULL_HANDLE;
}
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_DESCRIPTOR_LAYOUT_H_
#define GPU_VULKAN_VULKAN_DESCRIPTOR_LAYOUT_H_
#include <vulkan/vulkan.h>
#include <memory>
#include <vector>
#include "base/macros.h"
#include "gpu/vulkan/vulkan_export.h"
namespace gpu {
class VulkanDeviceQueue;
class VULKAN_EXPORT VulkanDescriptorLayout {
public:
explicit VulkanDescriptorLayout(VulkanDeviceQueue* device_queue);
~VulkanDescriptorLayout();
bool Initialize(const std::vector<VkDescriptorSetLayoutBinding>& layout);
void Destroy();
VkDescriptorSetLayout handle() const { return handle_; }
private:
VulkanDeviceQueue* device_queue_ = nullptr;
VkDescriptorSetLayout handle_ = VK_NULL_HANDLE;
DISALLOW_COPY_AND_ASSIGN(VulkanDescriptorLayout);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_DESCRIPTOR_LAYOUT_H_
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_descriptor_pool.h"
#include "base/logging.h"
#include "gpu/vulkan/vulkan_descriptor_set.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
VulkanDescriptorPool::VulkanDescriptorPool(VulkanDeviceQueue* device_queue)
: device_queue_(device_queue) {}
VulkanDescriptorPool::~VulkanDescriptorPool() {
DCHECK_EQ(static_cast<VkDescriptorPool>(VK_NULL_HANDLE), handle_);
DCHECK_EQ(0u, descriptor_count_);
}
bool VulkanDescriptorPool::Initialize(
uint32_t max_descriptor_sets,
const std::vector<VkDescriptorPoolSize>& pool_sizes) {
DCHECK_EQ(static_cast<VkDescriptorPool>(VK_NULL_HANDLE), handle_);
max_descriptor_sets_ = max_descriptor_sets;
VkDescriptorPoolCreateInfo descriptor_pool_create_info = {};
descriptor_pool_create_info.sType =
VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
descriptor_pool_create_info.flags =
VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
descriptor_pool_create_info.maxSets = max_descriptor_sets;
descriptor_pool_create_info.poolSizeCount =
static_cast<uint32_t>(pool_sizes.size());
descriptor_pool_create_info.pPoolSizes = pool_sizes.data();
VkResult result =
vkCreateDescriptorPool(device_queue_->GetVulkanDevice(),
&descriptor_pool_create_info, nullptr, &handle_);
if (VK_SUCCESS != result) {
DLOG(ERROR) << "vkCreateDescriptorPool() failed: " << result;
return false;
}
return true;
}
void VulkanDescriptorPool::Destroy() {
DCHECK_EQ(0u, descriptor_count_);
if (VK_NULL_HANDLE != handle_) {
vkDestroyDescriptorPool(device_queue_->GetVulkanDevice(), handle_, nullptr);
handle_ = VK_NULL_HANDLE;
}
max_descriptor_sets_ = 0;
}
std::unique_ptr<VulkanDescriptorSet> VulkanDescriptorPool::CreateDescriptorSet(
const VulkanDescriptorLayout* layout) {
std::unique_ptr<VulkanDescriptorSet> descriptor_set(
new VulkanDescriptorSet(device_queue_, this));
if (!descriptor_set->Initialize(layout)) {
return nullptr;
}
return descriptor_set;
}
void VulkanDescriptorPool::IncrementDescriptorSetCount() {
DCHECK_LT(descriptor_count_, max_descriptor_sets_);
descriptor_count_++;
}
void VulkanDescriptorPool::DecrementDescriptorSetCount() {
DCHECK_LT(0u, descriptor_count_);
descriptor_count_--;
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_DESCRIPTOR_POOL_H_
#define GPU_VULKAN_VULKAN_DESCRIPTOR_POOL_H_
#include <vulkan/vulkan.h>
#include <memory>
#include <vector>
#include "base/macros.h"
namespace gpu {
class VulkanDescriptorLayout;
class VulkanDescriptorSet;
class VulkanDeviceQueue;
class VulkanDescriptorPool {
public:
explicit VulkanDescriptorPool(VulkanDeviceQueue* device_queue);
~VulkanDescriptorPool();
bool Initialize(uint32_t max_descriptor_sets,
const std::vector<VkDescriptorPoolSize>& pool_sizes);
void Destroy();
std::unique_ptr<VulkanDescriptorSet> CreateDescriptorSet(
const VulkanDescriptorLayout* layout);
VkDescriptorPool handle() { return handle_; }
private:
friend class VulkanDescriptorSet;
void IncrementDescriptorSetCount();
void DecrementDescriptorSetCount();
VulkanDeviceQueue* device_queue_ = nullptr;
VkDescriptorPool handle_ = VK_NULL_HANDLE;
uint32_t max_descriptor_sets_ = 0;
uint32_t descriptor_count_ = 0;
DISALLOW_COPY_AND_ASSIGN(VulkanDescriptorPool);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_DESCRIPTOR_POOL_H_
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_descriptor_set.h"
#include "base/logging.h"
#include "gpu/vulkan/vulkan_descriptor_layout.h"
#include "gpu/vulkan/vulkan_descriptor_pool.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
VulkanDescriptorSet::~VulkanDescriptorSet() {
DCHECK_EQ(static_cast<VkDescriptorSet>(VK_NULL_HANDLE), handle_);
descriptor_pool_->DecrementDescriptorSetCount();
}
bool VulkanDescriptorSet::Initialize(const VulkanDescriptorLayout* layout) {
VkResult result = VK_SUCCESS;
VkDevice device = device_queue_->GetVulkanDevice();
VkDescriptorSetLayout layout_handle = layout->handle();
VkDescriptorSetAllocateInfo set_allocate_info = {};
set_allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
set_allocate_info.descriptorPool = descriptor_pool_->handle();
set_allocate_info.descriptorSetCount = 1;
set_allocate_info.pSetLayouts = &layout_handle;
result = vkAllocateDescriptorSets(device, &set_allocate_info, &handle_);
if (VK_SUCCESS != result) {
DLOG(ERROR) << "vkAllocateDescriptorSets() failed: " << result;
return false;
}
return true;
}
void VulkanDescriptorSet::Destroy() {
VkDevice device = device_queue_->GetVulkanDevice();
if (VK_NULL_HANDLE != handle_) {
vkFreeDescriptorSets(device, descriptor_pool_->handle(), 1, &handle_);
handle_ = VK_NULL_HANDLE;
}
}
void VulkanDescriptorSet::WriteToDescriptorSet(
uint32_t dst_binding,
uint32_t dst_array_element,
uint32_t descriptor_count,
VkDescriptorType descriptor_type,
const VkDescriptorImageInfo* image_info,
const VkDescriptorBufferInfo* buffer_info,
const VkBufferView* texel_buffer_view) {
VkWriteDescriptorSet write_descriptor_set = {};
write_descriptor_set.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write_descriptor_set.dstSet = handle_;
write_descriptor_set.dstBinding = dst_binding;
write_descriptor_set.dstArrayElement = dst_array_element;
write_descriptor_set.descriptorCount = descriptor_count;
write_descriptor_set.descriptorType = descriptor_type;
write_descriptor_set.pImageInfo = image_info;
write_descriptor_set.pBufferInfo = buffer_info;
write_descriptor_set.pTexelBufferView = texel_buffer_view;
vkUpdateDescriptorSets(device_queue_->GetVulkanDevice(), 1,
&write_descriptor_set, 0, nullptr);
}
void VulkanDescriptorSet::CopyFromDescriptorSet(
const VulkanDescriptorSet* source_set,
uint32_t src_binding,
uint32_t src_array_element,
uint32_t dst_binding,
uint32_t dst_array_element,
uint32_t descriptor_count) {
VkCopyDescriptorSet copy_descriptor_set = {};
copy_descriptor_set.sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
copy_descriptor_set.srcSet = source_set->handle();
copy_descriptor_set.srcBinding = src_binding;
copy_descriptor_set.srcArrayElement = src_array_element;
copy_descriptor_set.dstSet = handle_;
copy_descriptor_set.dstBinding = dst_binding;
copy_descriptor_set.dstArrayElement = dst_array_element;
copy_descriptor_set.descriptorCount = descriptor_count;
vkUpdateDescriptorSets(device_queue_->GetVulkanDevice(), 0, nullptr, 1,
&copy_descriptor_set);
}
VulkanDescriptorSet::VulkanDescriptorSet(VulkanDeviceQueue* device_queue,
VulkanDescriptorPool* descriptor_pool)
: device_queue_(device_queue), descriptor_pool_(descriptor_pool) {
descriptor_pool_->IncrementDescriptorSetCount();
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_DESCRIPTOR_SET_H_
#define GPU_VULKAN_VULKAN_DESCRIPTOR_SET_H_
#include <vulkan/vulkan.h>
#include <memory>
#include <vector>
#include "base/macros.h"
#include "gpu/vulkan/vulkan_export.h"
namespace gpu {
class VulkanDescriptorPool;
class VulkanDescriptorLayout;
class VulkanDeviceQueue;
class VULKAN_EXPORT VulkanDescriptorSet {
public:
~VulkanDescriptorSet();
bool Initialize(const VulkanDescriptorLayout* layout);
void Destroy();
void WriteToDescriptorSet(uint32_t dst_binding,
uint32_t dst_array_element,
uint32_t descriptor_count,
VkDescriptorType descriptor_type,
const VkDescriptorImageInfo* image_info,
const VkDescriptorBufferInfo* buffer_info,
const VkBufferView* texel_buffer_view);
void CopyFromDescriptorSet(const VulkanDescriptorSet* source_set,
uint32_t src_binding,
uint32_t src_array_element,
uint32_t dst_binding,
uint32_t dst_array_element,
uint32_t descriptor_count);
VkDescriptorSet handle() const { return handle_; }
private:
friend class VulkanDescriptorPool;
VulkanDescriptorSet(VulkanDeviceQueue* device_queue,
VulkanDescriptorPool* descriptor_pool);
VulkanDeviceQueue* device_queue_ = nullptr;
VulkanDescriptorPool* descriptor_pool_ = nullptr;
VkDescriptorSet handle_ = VK_NULL_HANDLE;
DISALLOW_COPY_AND_ASSIGN(VulkanDescriptorSet);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_DESCRIPTOR_SET_H_
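For context on what is being dropped, here is a minimal sketch of how the three removed descriptor classes fit together (layout -> pool -> set). It is illustrative only: the binding, the pool size, and the device_queue pointer are placeholders, not taken from any in-tree caller.
// Hypothetical usage of the removed classes; all values are placeholders.
VkDescriptorSetLayoutBinding binding = {};
binding.binding = 0;
binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
binding.descriptorCount = 1;
binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
VulkanDescriptorLayout layout(device_queue);
if (!layout.Initialize({binding}))
  return false;
VulkanDescriptorPool pool(device_queue);
VkDescriptorPoolSize pool_size = {VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1};
if (!pool.Initialize(1 /* max_descriptor_sets */, {pool_size}))
  return false;
std::unique_ptr<VulkanDescriptorSet> set = pool.CreateDescriptorSet(&layout);
if (!set)
  return false;
// set->WriteToDescriptorSet(...) would attach image/buffer resources here.
// Teardown order matters: the set must be destroyed and released before the
// pool, whose Destroy() DCHECKs that no sets are outstanding.
set->Destroy();
set.reset();
pool.Destroy();
layout.Destroy();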
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_image_view.h"
#include "base/logging.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
namespace {
const VkImageAspectFlags kAspectFlags[] = {
// IMAGE_TYPE_COLOR,
VK_IMAGE_ASPECT_COLOR_BIT,
// IMAGE_TYPE_DEPTH,
VK_IMAGE_ASPECT_DEPTH_BIT,
// IMAGE_TYPE_STENCIL,
VK_IMAGE_ASPECT_STENCIL_BIT,
// IMAGE_TYPE_DEPTH_STENCIL,
VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
};
static_assert(arraysize(kAspectFlags) == VulkanImageView::NUM_IMAGE_TYPES,
"Array size for kAspectFlags must match image types.");
} // namespace
VulkanImageView::VulkanImageView(VulkanDeviceQueue* device_queue)
: device_queue_(device_queue) {}
VulkanImageView::~VulkanImageView() {
DCHECK_EQ(static_cast<VkImageView>(VK_NULL_HANDLE), handle_);
DCHECK_EQ(IMAGE_TYPE_INVALID, image_type_);
}
bool VulkanImageView::Initialize(VkImage image,
VkImageViewType image_view_type,
ImageType image_type,
VkFormat format,
uint32_t width,
uint32_t height,
uint32_t base_mip_level,
uint32_t mip_levels,
uint32_t base_layer_level,
uint32_t num_layers) {
format_ = format;
DCHECK_GT(image_type, IMAGE_TYPE_INVALID);
DCHECK_LT(image_type, NUM_IMAGE_TYPES);
VkImageSubresourceRange image_subresource_range = {};
image_subresource_range.aspectMask = kAspectFlags[image_type];
image_subresource_range.baseMipLevel = base_mip_level;
image_subresource_range.levelCount = mip_levels;
image_subresource_range.baseArrayLayer = base_layer_level;
image_subresource_range.layerCount = num_layers;
VkImageViewCreateInfo image_view_create_info = {};
image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
image_view_create_info.image = image;
image_view_create_info.viewType = image_view_type;
image_view_create_info.format = format;
image_view_create_info.components = {
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
image_view_create_info.subresourceRange = image_subresource_range;
VkResult result =
vkCreateImageView(device_queue_->GetVulkanDevice(),
&image_view_create_info, nullptr, &handle_);
if (VK_SUCCESS != result) {
DLOG(ERROR) << "vkCreateImageView() failed: " << result;
return false;
}
image_type_ = image_type;
width_ = width;
height_ = height;
mip_levels_ = mip_levels;
layers_ = num_layers;
return true;
}
void VulkanImageView::Destroy() {
if (VK_NULL_HANDLE != handle_) {
vkDestroyImageView(device_queue_->GetVulkanDevice(), handle_, nullptr);
image_type_ = IMAGE_TYPE_INVALID;
handle_ = VK_NULL_HANDLE;
}
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_IMAGE_VIEW_H_
#define GPU_VULKAN_VULKAN_IMAGE_VIEW_H_
#include <vulkan/vulkan.h>
#include "base/macros.h"
namespace gpu {
class VulkanDeviceQueue;
class VulkanImageView {
public:
enum ImageType {
IMAGE_TYPE_COLOR,
IMAGE_TYPE_DEPTH,
IMAGE_TYPE_STENCIL,
IMAGE_TYPE_DEPTH_STENCIL,
NUM_IMAGE_TYPES,
IMAGE_TYPE_INVALID = -1,
};
explicit VulkanImageView(VulkanDeviceQueue* device_queue);
~VulkanImageView();
bool Initialize(VkImage image,
VkImageViewType image_view_type,
ImageType image_type,
VkFormat format,
uint32_t width,
uint32_t height,
uint32_t base_mip_level,
uint32_t mip_levels,
uint32_t base_layer_level,
uint32_t num_layers);
void Destroy();
ImageType image_type() const { return image_type_; }
VkImageView handle() const { return handle_; }
VkFormat format() const { return format_; }
uint32_t width() const { return width_; }
uint32_t height() const { return height_; }
uint32_t mip_levels() const { return mip_levels_; }
uint32_t layers() const { return layers_; }
private:
VulkanDeviceQueue* device_queue_ = nullptr;
ImageType image_type_ = IMAGE_TYPE_INVALID;
VkImageView handle_ = VK_NULL_HANDLE;
VkFormat format_ = VK_FORMAT_UNDEFINED;
uint32_t width_ = 0;
uint32_t height_ = 0;
uint32_t mip_levels_ = 0;
uint32_t layers_ = 0;
DISALLOW_COPY_AND_ASSIGN(VulkanImageView);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_IMAGE_VIEW_H_
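For reference, the only in-tree use of VulkanImageView was the swap-chain color view removed in the vulkan_swap_chain.cc hunk further below; a sketch of that pattern follows, with device_queue, image, surface_format, width, and height standing in as placeholders.
// Sketch mirroring the swap-chain usage removed later in this change;
// device_queue, image, surface_format, width, and height are placeholders.
VulkanImageView image_view(device_queue);
if (!image_view.Initialize(image, VK_IMAGE_VIEW_TYPE_2D,
                           VulkanImageView::IMAGE_TYPE_COLOR, surface_format,
                           width, height, 0 /* base_mip_level */,
                           1 /* mip_levels */, 0 /* base_layer_level */,
                           1 /* num_layers */)) {
  return false;
}
// ... use image_view.handle() as a framebuffer attachment ...
image_view.Destroy();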
This diff is collapsed.
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_RENDER_PASS_H_
#define GPU_VULKAN_VULKAN_RENDER_PASS_H_
#include <vector>
#include <vulkan/vulkan.h>
#include "base/macros.h"
#include "gpu/vulkan/vulkan_export.h"
namespace gpu {
class CommandBufferRecorderBase;
class VulkanDeviceQueue;
class VulkanImageView;
class VulkanSwapChain;
class VULKAN_EXPORT VulkanRenderPass {
public:
enum class AttachmentType {
// Use image view of the swap chain image.
ATTACHMENT_TYPE_SWAP_IMAGE,
// Use image view of the attachment data.
ATTACHMENT_TYPE_ATTACHMENT_VIEW,
};
enum class ImageLayoutType {
// Undefined image layout.
IMAGE_LAYOUT_UNDEFINED,
// Image layout which matches the image view.
IMAGE_LAYOUT_TYPE_IMAGE_VIEW,
// Image layout for presenting.
IMAGE_LAYOUT_TYPE_PRESENT,
};
struct AttachmentData {
AttachmentType attachment_type;
VkSampleCountFlagBits sample_count;
VkAttachmentLoadOp load_op;
VkAttachmentStoreOp store_op;
// The stencil ops are only used for IMAGE_TYPE_STENCIL and
// IMAGE_TYPE_DEPTH_STENCIL image views.
VkAttachmentLoadOp stencil_load_op;
VkAttachmentStoreOp stencil_store_op;
ImageLayoutType start_layout;
ImageLayoutType end_layout;
VulkanImageView* image_view; // used for ATTACHMENT_TYPE_ATTACHMENT_VIEW.
VkClearValue clear_value; // used for VK_ATTACHMENT_LOAD_OP_CLEAR.
bool ValidateData(const VulkanSwapChain* swap_chain) const;
};
struct SubpassAttachment {
uint32_t attachment_index;
ImageLayoutType subpass_layout;
};
struct SubpassData {
SubpassData();
SubpassData(const SubpassData& data);
SubpassData(SubpassData&& data);
~SubpassData();
std::vector<SubpassAttachment> subpass_attachments;
bool ValidateData(uint32_t num_attachments) const;
};
struct RenderPassData {
RenderPassData();
RenderPassData(const RenderPassData& data);
RenderPassData(RenderPassData&& data);
~RenderPassData();
std::vector<AttachmentData> attachments;
std::vector<SubpassData> subpass_datas;
bool ValidateData(const VulkanSwapChain* swap_chain) const;
};
explicit VulkanRenderPass(VulkanDeviceQueue* device_queue);
~VulkanRenderPass();
bool Initialize(const VulkanSwapChain* swap_chain,
const RenderPassData& render_pass_data);
void Destroy();
// Begins the render pass on the recorder's command buffer. exec_inline
// signifies whether the subpass commands will be executed inline (within a
// primary command buffer) or through secondary command buffers.
void BeginRenderPass(const CommandBufferRecorderBase& recorder,
bool exec_inline);
// Begins the next subpass after BeginRenderPass has been called.
void NextSubPass(const CommandBufferRecorderBase& recorder);
// Ends the render passes.
void EndRenderPass(const CommandBufferRecorderBase& recorder);
void SetClearValue(uint32_t attachment_index, VkClearValue clear_value);
private:
VulkanDeviceQueue* device_queue_ = nullptr;
const VulkanSwapChain* swap_chain_ = nullptr;
uint32_t num_sub_passes_ = 0;
uint32_t current_sub_pass_ = 0;
bool executing_ = false;
VkSubpassContents execution_type_ = VK_SUBPASS_CONTENTS_INLINE;
VkRenderPass render_pass_ = VK_NULL_HANDLE;
// There is 1 clear color for every attachment which needs a clear.
std::vector<VkClearValue> attachment_clear_values_;
// There is 1 clear index for every attachment which needs a clear. This is
// kept in a separate array since it is only used for setting clear values.
std::vector<uint32_t> attachment_clear_indexes_;
// There is 1 frame buffer for every swap chain image.
std::vector<VkFramebuffer> frame_buffers_;
DISALLOW_COPY_AND_ASSIGN(VulkanRenderPass);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_RENDER_PASS_H_
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_sampler.h"
#include "base/logging.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
VulkanSampler::SamplerOptions::SamplerOptions() = default;
VulkanSampler::SamplerOptions::~SamplerOptions() = default;
VulkanSampler::VulkanSampler(VulkanDeviceQueue* device_queue)
: device_queue_(device_queue) {}
VulkanSampler::~VulkanSampler() {
DCHECK_EQ(static_cast<VkSampler>(VK_NULL_HANDLE), handle_);
}
bool VulkanSampler::Initialize(const SamplerOptions& options) {
DCHECK_EQ(static_cast<VkSampler>(VK_NULL_HANDLE), handle_);
VkSamplerCreateInfo sampler_create_info = {};
sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
sampler_create_info.magFilter = options.mag_filter;
sampler_create_info.minFilter = options.min_filter;
sampler_create_info.mipmapMode = options.mipmap_mode;
sampler_create_info.addressModeU = options.address_mode_u;
sampler_create_info.addressModeV = options.address_mode_v;
sampler_create_info.addressModeW = options.address_mode_w;
sampler_create_info.mipLodBias = options.mip_lod_bias;
sampler_create_info.anisotropyEnable = options.anisotropy_enable;
sampler_create_info.maxAnisotropy = options.max_anisotropy;
sampler_create_info.compareOp = options.compare_op;
sampler_create_info.minLod = options.min_lod;
sampler_create_info.maxLod = options.max_lod;
sampler_create_info.borderColor = options.border_color;
sampler_create_info.unnormalizedCoordinates =
options.unnormalized_coordinates;
VkResult result = vkCreateSampler(device_queue_->GetVulkanDevice(),
&sampler_create_info, nullptr, &handle_);
if (VK_SUCCESS != result) {
DLOG(ERROR) << "vkCreateSampler() failed: " << result;
return false;
}
return true;
}
void VulkanSampler::Destroy() {
if (VK_NULL_HANDLE != handle_) {
vkDestroySampler(device_queue_->GetVulkanDevice(), handle_, nullptr);
handle_ = VK_NULL_HANDLE;
}
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_SAMPLER_H_
#define GPU_VULKAN_VULKAN_SAMPLER_H_
#include <float.h>
#include <vulkan/vulkan.h>
#include "base/macros.h"
#include "gpu/vulkan/vulkan_export.h"
namespace gpu {
class VulkanDeviceQueue;
class VULKAN_EXPORT VulkanSampler {
public:
struct SamplerOptions {
SamplerOptions();
~SamplerOptions();
VkFilter mag_filter = VK_FILTER_NEAREST;
VkFilter min_filter = VK_FILTER_NEAREST;
VkSamplerMipmapMode mipmap_mode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
VkSamplerAddressMode address_mode_u = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
VkSamplerAddressMode address_mode_v = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
VkSamplerAddressMode address_mode_w = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
float mip_lod_bias = 0.0f;
bool anisotropy_enable = false;
float max_anisotropy = 1.0f;
bool compare_enable = false;
VkCompareOp compare_op = VK_COMPARE_OP_NEVER;
float min_lod = 0.0f;
float max_lod = FLT_MAX;
VkBorderColor border_color = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
bool unnormalized_coordinates = false;
};
explicit VulkanSampler(VulkanDeviceQueue* device_queue);
~VulkanSampler();
bool Initialize(const SamplerOptions& options);
void Destroy();
VkSampler handle() const { return handle_; }
private:
VulkanDeviceQueue* device_queue_ = nullptr;
VkSampler handle_ = VK_NULL_HANDLE;
DISALLOW_COPY_AND_ASSIGN(VulkanSampler);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_SAMPLER_H_
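A minimal sketch of the removed sampler wrapper, assuming a valid VulkanDeviceQueue*; the linear-filter overrides below are illustrative, and everything else keeps the SamplerOptions defaults shown above.
// Illustrative only; option values are not taken from any in-tree caller.
VulkanSampler::SamplerOptions options;
options.mag_filter = VK_FILTER_LINEAR;
options.min_filter = VK_FILTER_LINEAR;
VulkanSampler sampler(device_queue);
if (!sampler.Initialize(options))
  return false;
// ... sampler.handle() would be written into a descriptor set ...
sampler.Destroy();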
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/vulkan/vulkan_shader_module.h"
#include <memory>
#include <sstream>
#include "base/logging.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
namespace gpu {
VulkanShaderModule::VulkanShaderModule(VulkanDeviceQueue* device_queue)
: device_queue_(device_queue) {
DCHECK(device_queue_);
}
VulkanShaderModule::~VulkanShaderModule() {
DCHECK_EQ(static_cast<VkShaderModule>(VK_NULL_HANDLE), handle_);
}
bool VulkanShaderModule::InitializeSPIRV(ShaderType type,
std::string name,
std::string entry_point,
std::string source) {
DCHECK_EQ(static_cast<VkShaderModule>(VK_NULL_HANDLE), handle_);
shader_type_ = type;
name_ = std::move(name);
entry_point_ = std::move(entry_point);
// Make sure source is a multiple of 4.
const int padding = 4 - (source.length() % 4);
if (padding < 4) {
for (int i = 0; i < padding; ++i) {
source += ' ';
}
}
VkShaderModuleCreateInfo shader_module_create_info = {};
shader_module_create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
shader_module_create_info.pCode =
reinterpret_cast<const uint32_t*>(source.c_str());
shader_module_create_info.codeSize = source.length();
VkShaderModule shader_module = VK_NULL_HANDLE;
VkResult result =
vkCreateShaderModule(device_queue_->GetVulkanDevice(),
&shader_module_create_info, nullptr, &shader_module);
if (VK_SUCCESS != result) {
std::stringstream ss;
ss << "vkCreateShaderModule() failed: " << result;
error_messages_ = ss.str();
DLOG(ERROR) << error_messages_;
return false;
}
handle_ = shader_module;
return true;
}
void VulkanShaderModule::Destroy() {
if (handle_ != VK_NULL_HANDLE) {
vkDestroyShaderModule(device_queue_->GetVulkanDevice(), handle_, nullptr);
handle_ = VK_NULL_HANDLE;
}
entry_point_.clear();
error_messages_.clear();
}
} // namespace gpu
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_VULKAN_VULKAN_SHADER_MODULE_H_
#define GPU_VULKAN_VULKAN_SHADER_MODULE_H_
#include <string>
#include <vulkan/vulkan.h>
#include "base/macros.h"
#include "gpu/vulkan/vulkan_export.h"
namespace gpu {
class VulkanDeviceQueue;
class VULKAN_EXPORT VulkanShaderModule {
public:
enum class ShaderType {
VERTEX,
FRAGMENT,
};
explicit VulkanShaderModule(VulkanDeviceQueue* device_queue);
~VulkanShaderModule();
bool InitializeSPIRV(ShaderType type,
std::string name,
std::string entry_point,
std::string source);
void Destroy();
bool IsValid() const { return handle_ != VK_NULL_HANDLE; }
std::string GetErrorMessages() const { return error_messages_; }
ShaderType shader_type() const { return shader_type_; }
const std::string& name() const { return name_; }
VkShaderModule handle() const { return handle_; }
const std::string& entry_point() const { return entry_point_; }
private:
VulkanDeviceQueue* device_queue_ = nullptr;
ShaderType shader_type_ = ShaderType::VERTEX;
VkShaderModule handle_ = VK_NULL_HANDLE;
std::string name_;
std::string entry_point_;
std::string error_messages_;
DISALLOW_COPY_AND_ASSIGN(VulkanShaderModule);
};
} // namespace gpu
#endif // GPU_VULKAN_VULKAN_SHADER_MODULE_H_
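And a sketch of the removed shader-module path; spirv_binary stands for a pre-compiled SPIR-V blob held in a std::string, which is what InitializeSPIRV expects.
// Hypothetical caller; "triangle_vert" and spirv_binary are placeholders.
VulkanShaderModule vertex_module(device_queue);
if (!vertex_module.InitializeSPIRV(VulkanShaderModule::ShaderType::VERTEX,
                                   "triangle_vert" /* name */,
                                   "main" /* entry_point */,
                                   spirv_binary /* SPIR-V as a string */)) {
  DLOG(ERROR) << vertex_module.GetErrorMessages();
  return false;
}
// ... vertex_module.handle() would feed a VkPipelineShaderStageCreateInfo ...
vertex_module.Destroy();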
...@@ -8,7 +8,6 @@
#include "gpu/vulkan/vulkan_command_pool.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "gpu/vulkan/vulkan_image_view.h"
namespace gpu {
...@@ -311,14 +310,6 @@ bool VulkanSwapChain::InitializeSwapImages(
command_pool_->CreatePrimaryCommandBuffer();
image_data->post_raster_command_buffer =
command_pool_->CreatePrimaryCommandBuffer();
// Create the image view.
image_data->image_view.reset(new VulkanImageView(device_queue_));
if (!image_data->image_view->Initialize(
images[i], VK_IMAGE_VIEW_TYPE_2D, VulkanImageView::IMAGE_TYPE_COLOR,
surface_format.format, size_.width(), size_.height(), 0, 1, 0, 1)) {
return false;
}
}
result = vkCreateSemaphore(device, &semaphore_create_info, nullptr,
...@@ -364,12 +355,6 @@ void VulkanSwapChain::DestroySwapImages() {
image_data->post_raster_command_buffer.reset();
}
// Destroy Image View.
if (image_data->image_view) {
image_data->image_view->Destroy();
image_data->image_view.reset();
}
// Destroy Semaphores.
if (VK_NULL_HANDLE != image_data->present_semaphore) {
vkDestroySemaphore(device, image_data->present_semaphore, nullptr);
...
...@@ -19,7 +19,6 @@ namespace gpu {
class VulkanCommandBuffer;
class VulkanCommandPool;
class VulkanDeviceQueue;
class VulkanImageView;
class VulkanSwapChain {
public:
...@@ -39,15 +38,6 @@ class VulkanSwapChain {
uint32_t current_image() const { return current_image_; }
const gfx::Size& size() const { return size_; }
VulkanImageView* GetImageView(uint32_t index) const {
DCHECK_LT(index, images_.size());
return images_[index]->image_view.get();
}
VulkanImageView* GetCurrentImageView() const {
return GetImageView(current_image_);
}
VulkanCommandBuffer* GetCurrentCommandBuffer() const {
DCHECK_LT(current_image_, images_.size());
return images_[current_image_]->pre_raster_command_buffer.get();
...@@ -92,7 +82,6 @@ class VulkanSwapChain {
VkImage image = VK_NULL_HANDLE;
VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED;
std::unique_ptr<VulkanImageView> image_view;
std::unique_ptr<VulkanCommandBuffer> pre_raster_command_buffer;
std::unique_ptr<VulkanCommandBuffer> post_raster_command_buffer;
...