summaryrefslogtreecommitdiff
path: root/src/vulkan_helper.cpp
diff options
context:
space:
mode:
authorChuyan Zhang <me@zcy.moe>2024-09-09 00:30:29 -0700
committerChuyan Zhang <me@zcy.moe>2024-09-09 00:30:29 -0700
commit7f14138e1baa2c40fb30d90ebcd45ad17b12e0a3 (patch)
treed9ad5dbb2871e3999480e55ffa24bea6e50f8dd4 /src/vulkan_helper.cpp
parent2ead02037dc89e987fbc0a021fe470e29d226cfd (diff)
downloadiris-7f14138e1baa2c40fb30d90ebcd45ad17b12e0a3.tar.gz
iris-7f14138e1baa2c40fb30d90ebcd45ad17b12e0a3.zip
Fixing swapchain
Diffstat (limited to 'src/vulkan_helper.cpp')
-rw-r--r--src/vulkan_helper.cpp323
1 files changed, 306 insertions, 17 deletions
diff --git a/src/vulkan_helper.cpp b/src/vulkan_helper.cpp
index e533d2d..339b9c0 100644
--- a/src/vulkan_helper.cpp
+++ b/src/vulkan_helper.cpp
@@ -1,11 +1,25 @@
#include "vulkan_helper.h"
#include "vulkan/vulkan_core.h"
+#include <limits>
+#define STB_IMAGE_IMPLEMENTATION
+#include "stb_image.h"
#include <cstdint>
+#include <cstring>
#include <cstdlib>
#include <iostream>
#include <memory>
+#define VMA_IMPLEMENTATION
#include <vk_mem_alloc.h>
#ifdef USE_VULKAN_VALIDATION_LAYERS
// Validation-layer callback: print every report to stderr and return
// VK_FALSE so the call that triggered the report is not aborted.
// Signature is fixed by PFN_vkDebugReportCallbackEXT.
static VKAPI_ATTR VkBool32 VKAPI_CALL debug_report(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, void* pUserData)
{
    // Only the object type and the message text are reported; the
    // remaining parameters are deliberately ignored.
    (void)flags;
    (void)object;
    (void)location;
    (void)messageCode;
    (void)pLayerPrefix;
    (void)pUserData;
    fprintf(stderr, "[vulkan] Debug report from ObjectType: %i\nMessage: %s\n\n", objectType, pMessage);
    return VK_FALSE;
}
#endif
+
namespace iris {
Device::Device(std::vector<std::string> layers, std::vector<std::string> instance_extensions) {
@@ -44,6 +58,19 @@ Device::Device(std::vector<std::string> layers, std::vector<std::string> instanc
VK_NULL_HANDLE,
&instance));
+#ifdef USE_VULKAN_VALIDATION_LAYERS
+ layers.push_back("VK_LAYER_KHRONOS_validation");
+ auto vkCreateDebugReportCallback = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
+ if (vkCreateDebugReportCallback) {
+ VkDebugReportCallbackCreateInfoEXT debug_report_create_info = {
+ .sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+ .flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ .pfnCallback = debug_report,
+ };
+ CHECK_VULKAN(vkCreateDebugReportCallback(instance, &debug_report_create_info, nullptr, &debugReportCallback));
+ }
+#endif
+
// Enumerate and select the physical device
uint32_t physical_device_count = 0;
CHECK_VULKAN(vkEnumeratePhysicalDevices(
@@ -117,6 +144,7 @@ Device::Device(std::vector<std::string> layers, std::vector<std::string> instanc
};
constexpr char *device_extensions[] = {
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME,
VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME,
VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME,
VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME,
@@ -154,28 +182,121 @@ Device::Device(std::vector<std::string> layers, std::vector<std::string> instanc
&allocator));
}
-Device::~Device() {
+// not handled by RAII, manually call at the end.
+void Device::destroy() {
vmaDestroyAllocator(allocator);
vkDestroyDevice(device, VK_NULL_HANDLE);
vkDestroyInstance(instance, VK_NULL_HANDLE);
}
+CommandBuffer::CommandBuffer(VkDevice device,
+ uint32_t queue_family_index,
+ VkQueue queue)
+ : device(device), queue(queue)
+{
+ VkCommandPoolCreateInfo pool_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+ .queueFamilyIndex = queue_family_index, // TODO: query capabilities to find a proper queue index
+ };
+ CHECK_VULKAN(vkCreateCommandPool(
+ device,
+ &pool_info,
+ VK_NULL_HANDLE,
+ &this->pool));
+
+ VkCommandBufferAllocateInfo buffer_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+ .commandPool = this->pool,
+ .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+ .commandBufferCount = 1,
+ };
+ CHECK_VULKAN(vkAllocateCommandBuffers(
+ device,
+ &buffer_info,
+ &buffer));
+
+ VkFenceCreateInfo fence_info = {
+ .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+ };
+ CHECK_VULKAN(vkCreateFence(
+ device,
+ &fence_info,
+ VK_NULL_HANDLE,
+ &fence));
+}
+
+CommandBuffer::~CommandBuffer() {
+ vkDestroyFence(device, fence, VK_NULL_HANDLE);
+ vkFreeCommandBuffers(device, pool, 1, &buffer);
+ vkDestroyCommandPool(device, pool, VK_NULL_HANDLE);
+}
+
+CommandBuffer Device::create_command_buffer() {
+ return CommandBuffer(device, main_queue_family_index, graphics_queue);
+}
+
+void CommandBuffer::begin(VkCommandBufferUsageFlags flags) {
+ CHECK_VULKAN(vkResetFences(device, 1, &fence));
+ CHECK_VULKAN(vkResetCommandPool(device, pool, 0u));
+
+ VkCommandBufferBeginInfo begin_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ .flags = flags,
+ };
+ CHECK_VULKAN(vkBeginCommandBuffer(buffer, &begin_info));
+}
+
+void CommandBuffer::submit_sync() {
+ CHECK_VULKAN(vkEndCommandBuffer(buffer));
+ VkPipelineStageFlags wait_stage = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+ VkSubmitInfo submit_info = {
+ .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ .waitSemaphoreCount = 0,
+ .pWaitSemaphores = VK_NULL_HANDLE,
+ .pWaitDstStageMask = &wait_stage,
+ .commandBufferCount = 1,
+ .pCommandBuffers = &buffer,
+ };
+ CHECK_VULKAN(vkQueueSubmit(
+ queue,
+ 1,
+ &submit_info,
+ fence));
+ CHECK_VULKAN(vkWaitForFences(
+ device,
+ 1,
+ &fence,
+ VK_TRUE,
+ std::numeric_limits<uint64_t>::max()));
+}
+
+void *Buffer_t::map() {
+ if (mapped_data == nullptr) {
+ CHECK_VULKAN(vmaMapMemory(allocator, allocation, &mapped_data));
+ }
+ return mapped_data;
+}
+
+void Buffer_t::unmap() {
+ if (mapped_data != nullptr) {
+ vmaUnmapMemory(allocator, allocation);
+ mapped_data = nullptr;
+ }
+}
// Releases the buffer and its VMA allocation. If the caller forgot to
// unmap(), balance the mapping here first: VMA treats freeing an
// allocation whose map count is still non-zero as an error.
// NOTE(review): Buffer_t values are copied into shared_ptrs elsewhere in
// this file while the source object is also destroyed — confirm the
// copy/move semantics declared in the header prevent a double free.
Buffer_t::~Buffer_t() {
    if (mapped_data != nullptr) {
        vmaUnmapMemory(allocator, allocation);
        mapped_data = nullptr;
    }
    vmaDestroyBuffer(allocator, buffer, allocation);
}
-Buffer Device::create_buffer(VkDeviceSize size,
- VkBufferUsageFlags usage,
- VmaMemoryUsage memory_usage) {
+Buffer_t Device::create_buffer_raw(VkDeviceSize size,
+ VkBufferUsageFlags usage,
+ VmaAllocationCreateInfo create_info)
+{
VkBufferCreateInfo buffer_info = {
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = size,
.usage = usage,
};
- VmaAllocationCreateInfo allocation_info = {
- .usage = memory_usage,
- };
Buffer_t buffer = {
.allocator = this->allocator,
.flags = usage,
@@ -184,11 +305,18 @@ Buffer Device::create_buffer(VkDeviceSize size,
CHECK_VULKAN(vmaCreateBuffer(
allocator,
&buffer_info,
- &allocation_info,
+ &create_info,
&buffer.buffer,
&buffer.allocation,
VK_NULL_HANDLE));
- return std::make_shared<Buffer_t>(buffer);
+ return buffer;
+}
+
+Buffer Device::create_buffer(VkDeviceSize size,
+ VkBufferUsageFlags usage,
+ VmaAllocationCreateInfo create_info)
+{
+ return std::make_shared<Buffer_t>(create_buffer_raw(size, usage, create_info));
}
Texture2D_t::~Texture2D_t() {
@@ -196,10 +324,11 @@ Texture2D_t::~Texture2D_t() {
// TODO: optionally destroy image view, if created
}
-Texture2D Device::create_texture(VkExtent2D extent,
- VkFormat format,
- VkImageUsageFlags usage,
- VmaMemoryUsage memory_usage) {
+Texture2D_t Device::create_texture_raw(VkExtent2D extent,
+ VkFormat format,
+ VkImageUsageFlags usage,
+ VmaAllocationCreateInfo create_info)
+{
VkImageCreateInfo image_info = {
.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
.imageType = VK_IMAGE_TYPE_2D,
@@ -211,21 +340,181 @@ Texture2D Device::create_texture(VkExtent2D extent,
.tiling = VK_IMAGE_TILING_OPTIMAL,
.usage = usage,
};
- VmaAllocationCreateInfo allocation_info = {
- .usage = memory_usage,
- };
Texture2D_t texture = {
.allocator = this->allocator,
+ .layout = VK_IMAGE_LAYOUT_UNDEFINED,
.flags = usage,
.extent = extent,
};
CHECK_VULKAN(vmaCreateImage(
allocator,
&image_info,
- &allocation_info,
+ &create_info,
&texture.image,
&texture.allocation,
VK_NULL_HANDLE));
+ return texture;
+}
+
+Texture2D Device::create_texture(VkExtent2D extent,
+ VkFormat format,
+ VkImageUsageFlags usage,
+ VmaAllocationCreateInfo create_info)
+{
+ return std::make_shared<Texture2D_t>(create_texture_raw(extent, format, usage, create_info));
+}
+
+Texture2D Device::create_texture_from_image(const char* filename,
+ VkFormat format,
+ VkImageUsageFlags usage,
+ VmaAllocationCreateInfo create_info) {
+ int width, height, channels;
+ stbi_uc* pixels = stbi_load(filename, &width, &height, &channels, STBI_rgb_alpha);
+ if (pixels == nullptr) {
+ // TODO throw an exception
+ std::cerr << "Failed to load image: " << filename << std::endl;
+ abort();
+ }
+
+ // destroy after use, don't need to wrap with shared_ptr
+ auto staging_buf = create_buffer_raw(
+ width * height * 4,
+ VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ VmaAllocationCreateInfo {
+ .flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT
+ | VMA_ALLOCATION_CREATE_MAPPED_BIT,
+ .usage = VMA_MEMORY_USAGE_AUTO,
+ });
+ std::memcpy(
+ staging_buf.map(),
+ pixels,
+ width * height * 4);
+ staging_buf.unmap();
+ stbi_image_free(pixels);
+
+ VkExtent2D extent = {static_cast<uint32_t>(width), static_cast<uint32_t>(height)};
+ Texture2D_t texture = create_texture_raw(extent, format, usage, create_info);
+
+ // Transit image layout for copying
+ {
+ CommandBuffer cmd_buf = create_command_buffer();
+ VkCommandBufferBeginInfo begin_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+ };
+ CHECK_VULKAN(vkBeginCommandBuffer(cmd_buf.buffer, &begin_info));
+ VkImageMemoryBarrier barrier = {
+ .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ .srcAccessMask = 0,
+ .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
+ .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
+ .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+ .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+ .image = texture.image,
+ .subresourceRange = {
+ .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
+ .levelCount = 1,
+ .layerCount = 1,
+ },
+ };
+ vkCmdPipelineBarrier(
+ cmd_buf.buffer,
+ VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+ VK_PIPELINE_STAGE_TRANSFER_BIT,
+ 0,
+ 0, nullptr,
+ 0, nullptr,
+ 1, &barrier);
+ CHECK_VULKAN(vkEndCommandBuffer(cmd_buf.buffer));
+ VkSubmitInfo submit_info = {
+ .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ .commandBufferCount = 1,
+ .pCommandBuffers = &cmd_buf.buffer,
+ };
+ CHECK_VULKAN(vkQueueSubmit(graphics_queue, 1, &submit_info, cmd_buf.fence));
+ CHECK_VULKAN(vkWaitForFences(cmd_buf.device, 1, &cmd_buf.fence, VK_TRUE, std::numeric_limits<uint64_t>::max()));
+ }
+
+ // copy staging buffer to texture
+ {
+ CommandBuffer cmd_buf = create_command_buffer();
+ VkCommandBufferBeginInfo begin_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+ };
+ CHECK_VULKAN(vkBeginCommandBuffer(cmd_buf.buffer, &begin_info));
+ VkBufferImageCopy region = {
+ .bufferOffset = 0,
+ .bufferRowLength = 0,
+ .bufferImageHeight = 0,
+ .imageSubresource = {
+ .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
+ .mipLevel = 0,
+ .baseArrayLayer = 0,
+ .layerCount = 1,
+ },
+ .imageOffset = {0, 0, 0},
+ .imageExtent = {(uint32_t) width, (uint32_t) height, 1},
+ };
+ vkCmdCopyBufferToImage(
+ cmd_buf.buffer,
+ staging_buf.buffer,
+ texture.image,
+ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ 1,
+ &region);
+ CHECK_VULKAN(vkEndCommandBuffer(cmd_buf.buffer));
+ VkSubmitInfo submit_info = {
+ .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ .commandBufferCount = 1,
+ .pCommandBuffers = &cmd_buf.buffer,
+ };
+ CHECK_VULKAN(vkQueueSubmit(graphics_queue, 1, &submit_info, cmd_buf.fence));
+ CHECK_VULKAN(vkWaitForFences(cmd_buf.device, 1, &cmd_buf.fence, VK_TRUE, std::numeric_limits<uint64_t>::max()));
+ }
+
+ // Transit image layout back for use
+ {
+ CommandBuffer cmd_buf = create_command_buffer();
+ VkCommandBufferBeginInfo begin_info = {
+ .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+ .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+ };
+ CHECK_VULKAN(vkBeginCommandBuffer(cmd_buf.buffer, &begin_info));
+ VkImageMemoryBarrier barrier = {
+ .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+ .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
+ .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ .oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+ .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+ .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
+ .image = texture.image,
+ .subresourceRange = {
+ .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
+ .levelCount = 1,
+ .layerCount = 1,
+ },
+ };
+ vkCmdPipelineBarrier(
+ cmd_buf.buffer,
+ VK_PIPELINE_STAGE_TRANSFER_BIT,
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ 0,
+ 0, nullptr,
+ 0, nullptr,
+ 1, &barrier);
+ CHECK_VULKAN(vkEndCommandBuffer(cmd_buf.buffer));
+ VkSubmitInfo submit_info = {
+ .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+ .commandBufferCount = 1,
+ .pCommandBuffers = &cmd_buf.buffer,
+ };
+ CHECK_VULKAN(vkQueueSubmit(graphics_queue, 1, &submit_info, cmd_buf.fence));
+ CHECK_VULKAN(vkWaitForFences(cmd_buf.device, 1, &cmd_buf.fence, VK_TRUE, std::numeric_limits<uint64_t>::max()));
+ }
+ texture.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
return std::make_shared<Texture2D_t>(texture);
}