Synchronizer &sync = device.get_synchronizer();
sync.reset();
- sync.barrier(current_buffer);
bool to_present = false;
unsigned n_attachments = framebuffer->get_format().size();
for(unsigned i=0; i<n_attachments; ++i)
if(dynamic_cast<const SwapChainTexture *>(framebuffer->get_attachment(i)))
to_present = true;
+ if(!to_present)
+ framebuffer->synchronize(clear);
VkRenderPass render_pass = device.get_pipeline_cache().get_render_pass(framebuffer->get_format(), clear, !clear_values, to_present);
framebuffer->refresh();
+ sync.barrier(current_buffer);
+
VkRenderPassBeginInfo begin_info = { };
begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
begin_info.renderPass = handle_cast<::VkRenderPass>(render_pass);
destroy<VkFramebuffer, &VulkanFunctions::DestroyFramebuffer>(handle);
}
+// Queues a VkImage for deferred destruction.  mem_id identifies the memory
+// allocation backing the image, which is released along with the handle.
+void DestroyQueue::destroy(VkImage handle, unsigned mem_id)
+{
+	destroy<VkImage, &VulkanFunctions::DestroyImage>(handle, mem_id);
+}
+
void DestroyQueue::destroy(VkImageView handle)
{
destroy<VkImageView, &VulkanFunctions::DestroyImageView>(handle);
}
+// Queues a VkSampler for deferred destruction.
+void DestroyQueue::destroy(VkSampler handle)
+{
+	destroy<VkSampler, &VulkanFunctions::DestroySampler>(handle);
+}
+
void DestroyQueue::destroy(VkSemaphore handle)
{
destroy<VkSemaphore, &VulkanFunctions::DestroySemaphore>(handle);
void destroy(VkBuffer, unsigned);
void destroy(VkFence);
void destroy(VkFramebuffer);
+ void destroy(VkImage, unsigned);
void destroy(VkImageView);
+ void destroy(VkSampler);
void destroy(VkSemaphore);
private:
set_vulkan_object_name();
}
+/* Declares upcoming accesses to all attachments with the synchronizer so the
+next barrier transitions them to their attachment layouts.  If discard is
+true, the previous contents of the attachments are not preserved (used when
+the render pass clears them). */
+void VulkanFramebuffer::synchronize(bool discard) const
+{
+	for(const Framebuffer::Attachment &a: static_cast<const Framebuffer *>(this)->attachments)
+		a.tex->synchronize(a.layer, get_vulkan_attachment_layout(get_components(a.tex->get_format())), discard);
+}
+
void VulkanFramebuffer::set_debug_name(const string &name)
{
#ifdef DEBUG
void update(unsigned) const;
void require_complete() const { }
+ void synchronize(bool = false) const;
+
void set_debug_name(const std::string &);
void set_vulkan_object_name() const;
};
return id;
}
+/* Allocates memory of the requested type for an image and binds it.  Returns
+an allocation id which must later be passed to release(). */
+unsigned MemoryAllocator::allocate(VkImage image, MemoryType type)
+{
+	const VulkanFunctions &vk = device.get_functions();
+
+	VkMemoryRequirements requirements;
+	vk.GetImageMemoryRequirements(image, requirements);
+
+	// Delegate to the generic size/type-bits overload shared with buffers.
+	unsigned id = allocate(requirements.size, requirements.memoryTypeBits, type);
+
+	vk.BindImageMemory(image, get_allocation(id).memory, 0);
+
+	return id;
+}
+
void MemoryAllocator::release(unsigned id)
{
Allocation &alloc = get_allocation(id);
public:
unsigned allocate(VkBuffer, MemoryType);
+ unsigned allocate(VkImage, MemoryType);
void release(unsigned);
std::size_t get_allocation_size(unsigned) const;
changed_sets |= 1<<(u.binding>>20);
u.changed = false;
}
+ for(const PipelineState::BoundTexture &t: self.textures)
+ if(t.changed || changed_sets==~0U)
+ {
+ t.used = self.shprog->uses_binding(t.binding);
+ changed_sets |= 1<<(t.binding>>20);
+ if(t.sampler)
+ t.sampler->refresh();
+ t.changed = false;
+ }
descriptor_set_handles.resize(self.shprog->get_n_descriptor_sets());
for(unsigned i=0; i<descriptor_set_handles.size(); ++i)
result = hash_update<64>(result, b.binding);
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(b.block));
}
+ for(const PipelineState::BoundTexture &t: self.textures)
+ if(t.used && (t.binding>>20)==index)
+ {
+ result = hash_update<64>(result, t.binding);
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.texture));
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.sampler));
+ }
return result;
}
for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
++n_buffers;
-
- StructureBuilder sb(buffer, 2);
- VkWriteDescriptorSet *&writes = sb.add<VkWriteDescriptorSet>(n_buffers);
+ unsigned n_images = 0;
+ for(const PipelineState::BoundTexture &t: self.textures)
+ if(t.used && (t.binding>>20)==index)
+ ++n_images;
+ unsigned n_writes = n_buffers+n_images;
+
+ StructureBuilder sb(buffer, 3);
+ VkWriteDescriptorSet *&writes = sb.add<VkWriteDescriptorSet>(n_writes);
VkDescriptorBufferInfo *&buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
+ VkDescriptorImageInfo *&images = sb.add<VkDescriptorImageInfo>(n_images);
VkWriteDescriptorSet *write_ptr = writes;
VkDescriptorBufferInfo *buffer_ptr = buffers;
+ VkDescriptorImageInfo *image_ptr = images;
for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
++write_ptr;
}
- return n_buffers;
+ for(const PipelineState::BoundTexture &t: self.textures)
+ if(t.used && (t.binding>>20)==index)
+ {
+ image_ptr->sampler = handle_cast<::VkSampler>(t.sampler->handle);
+ image_ptr->imageView = handle_cast<::VkImageView>(t.texture->view_handle);
+ image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+
+ write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+ write_ptr->dstBinding = t.binding&0xFFFFF;
+ write_ptr->descriptorCount = 1;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ write_ptr->pImageInfo = image_ptr;
+
+ ++image_ptr;
+ ++write_ptr;
+ }
+
+ return n_writes;
}
void VulkanPipelineState::apply(VkCommandBuffer command_buffer) const
+#include "destroyqueue.h"
+#include "device.h"
#include "sampler.h"
#include "sampler_backend.h"
+#include "vulkan.h"
using namespace std;
namespace Msp {
namespace GL {
-VulkanSampler::VulkanSampler()
-{
- throw logic_error("VulkanSampler is unimplemented");
-}
+VulkanSampler::VulkanSampler():
+ device(Device::get_current())
+{ }
VulkanSampler::VulkanSampler(VulkanSampler &&other):
- handle(other.handle)
+ device(other.device),
+ handle(other.handle),
+ debug_name(move(other.debug_name))
{
other.handle = 0;
}
VulkanSampler::~VulkanSampler()
-{ }
+{
+ if(handle)
+ device.get_destroy_queue().destroy(handle);
+}
+/* (Re)creates the Vulkan sampler object from the generic Sampler's state.
+The unsigned argument is unused here — presumably a change mask used by other
+backends; TODO confirm. */
 void VulkanSampler::update(unsigned) const
-{ }
+{
+	const Sampler &self = *static_cast<const Sampler *>(this);
+	const VulkanFunctions &vk = device.get_functions();
-void VulkanSampler::set_debug_name(const string &)
-{ }
+	// Vulkan sampler objects are immutable, so any parameter change requires
+	// destroying the old handle (deferred) and creating a new one.
+	if(handle)
+		device.get_destroy_queue().destroy(handle);
+
+	VkSamplerCreateInfo sampler_info = { };
+	sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+	sampler_info.magFilter = static_cast<VkFilter>(get_vulkan_filter(self.mag_filter));
+	sampler_info.minFilter = static_cast<VkFilter>(get_vulkan_filter(self.min_filter));
+	sampler_info.mipmapMode = static_cast<VkSamplerMipmapMode>(get_vulkan_mipmap_mode(self.min_filter));
+	sampler_info.addressModeU = static_cast<VkSamplerAddressMode>(get_vulkan_address_mode(self.wrap_s));
+	sampler_info.addressModeV = static_cast<VkSamplerAddressMode>(get_vulkan_address_mode(self.wrap_t));
+	sampler_info.addressModeW = static_cast<VkSamplerAddressMode>(get_vulkan_address_mode(self.wrap_r));
+	sampler_info.anisotropyEnable = (self.max_anisotropy>1.0f);
+	sampler_info.maxAnisotropy = self.max_anisotropy;
+	sampler_info.compareEnable = self.compare;
+	sampler_info.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.cmp_func));
+	// VK_LOD_CLAMP_NONE leaves the full mip chain available.
+	sampler_info.maxLod = VK_LOD_CLAMP_NONE;
+	// TODO Vulkan does not allow arbitrary border colors
+
+	vk.CreateSampler(sampler_info, handle);
+
+#ifdef DEBUG
+	// Reapply a previously set debug name to the freshly created handle.
+	if(!debug_name.empty())
+		set_vulkan_object_name();
+#endif
+}
+
+/* Stores a debug name for the sampler.  The name is applied immediately if a
+handle already exists, and otherwise when update() creates one.  Outside of
+DEBUG builds this is a no-op. */
+void VulkanSampler::set_debug_name(const string &name)
+{
+#ifdef DEBUG
+	debug_name = name;
+	if(handle)
+		set_vulkan_object_name();
+#else
+	(void)name;
+#endif
+}
+
+// Pushes the stored debug name to Vulkan via VK_EXT_debug_utils.
+void VulkanSampler::set_vulkan_object_name() const
+{
+#ifdef DEBUG
+	const VulkanFunctions &vk = device.get_functions();
+
+	VkDebugUtilsObjectNameInfoEXT name_info = { };
+	name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+	name_info.objectType = VK_OBJECT_TYPE_SAMPLER;
+	name_info.objectHandle = reinterpret_cast<uint64_t>(handle);
+	name_info.pObjectName = debug_name.c_str();
+	vk.SetDebugUtilsObjectName(name_info);
+#endif
+}
+
+
+/* Maps a generic texture filter to a VkFilter value.  The mipmap part of the
+filter is handled separately by get_vulkan_mipmap_mode. */
+unsigned get_vulkan_filter(unsigned filter)
+{
+	switch(filter)
+	{
+	case NEAREST:
+	case NEAREST_MIPMAP_NEAREST:
+	case NEAREST_MIPMAP_LINEAR: return VK_FILTER_NEAREST;
+	case LINEAR:
+	case LINEAR_MIPMAP_NEAREST:
+	case LINEAR_MIPMAP_LINEAR: return VK_FILTER_LINEAR;
+	default: throw invalid_argument("get_vulkan_filter");
+	}
+}
+
+/* Maps a generic minification filter to a VkSamplerMipmapMode.  Non-mipmapped
+filters use NEAREST mode; note that LINEAR mode is chosen even for
+*_MIPMAP_NEAREST filters — TODO confirm this is intentional. */
+unsigned get_vulkan_mipmap_mode(unsigned filter)
+{
+	switch(filter)
+	{
+	case NEAREST:
+	case LINEAR: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
+	case NEAREST_MIPMAP_NEAREST:
+	case NEAREST_MIPMAP_LINEAR:
+	case LINEAR_MIPMAP_NEAREST:
+	case LINEAR_MIPMAP_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
+	default: throw invalid_argument("get_vulkan_mipmap_mode");
+	}
+}
+
+// Maps a generic texture wrap mode to a VkSamplerAddressMode.
+unsigned get_vulkan_address_mode(unsigned wrap)
+{
+	switch(wrap)
+	{
+	case REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
+	case CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+	case CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
+	case MIRRORED_REPEAT: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
+	default: throw invalid_argument("get_vulkan_address_mode");
+	}
+}
} // namespace GL
} // namespace Msp
namespace Msp {
namespace GL {
+class Device;
+
class VulkanSampler
{
friend class VulkanPipelineState;
protected:
- VkSampler handle;
+ Device &device;
+ mutable VkSampler handle = 0;
+ std::string debug_name;
VulkanSampler();
VulkanSampler(VulkanSampler &&);
void update(unsigned) const;
void set_debug_name(const std::string &);
+ void set_vulkan_object_name() const;
};
using SamplerBackend = VulkanSampler;
+unsigned get_vulkan_filter(unsigned);
+unsigned get_vulkan_mipmap_mode(unsigned);
+unsigned get_vulkan_address_mode(unsigned);
+
} // namespace GL
} // namespace Msp
i->pending_write = true;
}
+/* Records an upcoming access to an image, to be realized by the next call to
+barrier().  aspect is a VkImageAspectFlags mask, layer selects a single array
+layer (-1 for all layers) and layout is the VkImageLayout required for the
+access.  If discard is true the previous contents are not needed and the old
+layout is treated as UNDEFINED. */
+void Synchronizer::access(VkImage image, unsigned aspect, int layer, unsigned layout, bool discard)
+{
+	auto i = find_member(image_accesses, image, &ImageAccess::image);
+	if(i==image_accesses.end())
+	{
+		i = image_accesses.emplace(image_accesses.end());
+		i->image = image;
+		i->aspect = aspect;
+		i->layer = layer;
+		// Untracked images are assumed to rest in shader read-only layout.
+		i->current_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+	}
+
+	if(discard)
+		i->current_layout = VK_IMAGE_LAYOUT_UNDEFINED;
+	// Accesses touching different layers widen the barrier to all layers.
+	if(layer!=i->layer)
+		i->layer = -1;
+	i->pending_layout = layout;
+}
+
void Synchronizer::reset()
{
	for(BufferAccess &b: buffer_accesses)
		b.pending_write = false;
+	// Return tracked images to the resting shader read-only layout; the next
+	// barrier() will then drop them from tracking.
+	for(ImageAccess &i: image_accesses)
+		i.pending_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
void Synchronizer::barrier(VkCommandBuffer command_buffer)
{
const VulkanFunctions &vk = device.get_functions();
- if(buffer_accesses.empty())
+ if(buffer_accesses.empty() && image_accesses.empty())
return;
VkPipelineStageFlags src_stage = 0;
dst_stage |= (b.pending_write ? buffer_write_stages : buffer_read_stages);
}
+ static constexpr VkPipelineStageFlags image_read_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT|VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT|
+ VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
+ static constexpr VkPipelineStageFlags image_write_stages = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT|
+ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT|VK_PIPELINE_STAGE_TRANSFER_BIT;
+
+ vector<VkImageMemoryBarrier> image_barriers;
+ image_barriers.reserve(image_accesses.size());
+ for(const ImageAccess &i: image_accesses)
+ {
+ image_barriers.emplace_back(VkImageMemoryBarrier{ });
+ VkImageMemoryBarrier &barrier = image_barriers.back();
+
+ barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+ barrier.srcAccessMask = (is_write_layout(i.current_layout) ? VK_ACCESS_MEMORY_WRITE_BIT : 0);
+ barrier.dstAccessMask = (is_write_layout(i.pending_layout) ? VK_ACCESS_MEMORY_WRITE_BIT : VK_ACCESS_MEMORY_READ_BIT);
+ barrier.oldLayout = static_cast<VkImageLayout>(i.current_layout);
+ barrier.newLayout = static_cast<VkImageLayout>(i.pending_layout);
+ barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
+ barrier.image = handle_cast<::VkImage>(i.image);
+ barrier.subresourceRange.aspectMask = i.aspect;
+ barrier.subresourceRange.baseMipLevel = 0;
+ barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
+ if(i.layer>=0)
+ {
+ barrier.subresourceRange.baseArrayLayer = i.layer;
+ barrier.subresourceRange.layerCount = 1;
+ }
+ else
+ {
+ barrier.subresourceRange.baseArrayLayer = 0;
+ barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
+ }
+
+ if(i.current_layout!=VK_IMAGE_LAYOUT_UNDEFINED)
+ src_stage |= (is_write_layout(i.current_layout) ? image_write_stages : image_read_stages);
+ dst_stage |= (is_write_layout(i.pending_layout) ? image_write_stages : image_read_stages);
+ }
+
if(!src_stage)
src_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
if(!dst_stage)
dst_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
vk.CmdPipelineBarrier(command_buffer, src_stage, dst_stage, 0, 0, 0,
- buffer_barriers.size(), buffer_barriers.data(), 0, 0);
+ buffer_barriers.size(), buffer_barriers.data(), image_barriers.size(), image_barriers.data());
for(auto i=buffer_accesses.begin(); i!=buffer_accesses.end(); )
{
++i;
}
}
+
+ for(auto i=image_accesses.begin(); i!=image_accesses.end(); )
+ {
+ if(i->pending_layout==VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
+ i = image_accesses.erase(i);
+ else
+ {
+ i->current_layout = i->pending_layout;
+ ++i;
+ }
+ }
+}
+
+// Returns true if an image in the given layout will be written to.
+bool Synchronizer::is_write_layout(unsigned layout)
+{
+	return layout==VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL || layout==VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL ||
+		layout==VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
}
} // namespace GL
class Synchronizer
{
private:
+ struct ImageAccess
+ {
+ VkImage image = 0;
+ unsigned aspect;
+ int layer = -1;
+ unsigned current_layout;
+ unsigned pending_layout;
+ };
+
struct BufferAccess
{
VkBuffer buffer = 0;
Device &device;
std::vector<BufferAccess> buffer_accesses;
+ std::vector<ImageAccess> image_accesses;
public:
Synchronizer(Device &);
void access(VkBuffer, std::size_t, std::size_t);
+ void access(VkImage, unsigned, int, unsigned, bool);
void reset();
void barrier(VkCommandBuffer);
+
+private:
+ bool is_write_layout(unsigned);
};
} // namespace GL
+#include "device.h"
+#include "texture1d.h"
#include "texture1d_backend.h"
#include "vulkan.h"
+using namespace std;
+
namespace Msp {
namespace GL {
VulkanTexture1D::VulkanTexture1D():
Texture(VK_IMAGE_VIEW_TYPE_1D)
+{ }
+
+/* Fills in the dimension-specific fields of a VkImageCreateInfo.  The
+parameter is passed as void * — presumably to keep Vulkan types out of the
+backend header; TODO confirm. */
+void VulkanTexture1D::fill_image_info(void *ii) const
{
-	throw std::logic_error("VulkanTexture1D is unimplemented");
+	const Texture1D &self = *static_cast<const Texture1D *>(this);
+
+	VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+	image_info->imageType = VK_IMAGE_TYPE_1D;
+	image_info->extent.width = self.width;
+	image_info->mipLevels = self.levels;
}
-void VulkanTexture1D::sub_image(unsigned, int, unsigned, const void *)
-{ }
+/* Uploads pixel data into a region of a mipmap level via a staging buffer.
+The actual copy is recorded by the transfer queue and executed later. */
+void VulkanTexture1D::sub_image(unsigned level, int x, unsigned wd, const void *data)
+{
+	const Texture1D &self = *static_cast<const Texture1D *>(this);
+
+	unsigned level_size = self.get_level_size(level);
+	// Request transfer-destination layout; old contents can be discarded if
+	// the entire level is being replaced.
+	synchronize(-1, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (x==0 && wd==level_size));
+
+	size_t data_size = wd*get_pixel_size(storage_fmt);
+	void *staging = device.get_transfer_queue().prepare_transfer(data_size,
+		[this, level, x, wd](VkCommandBuffer cmd_buf, VkBuffer staging_buf, size_t src_off){
+			const VulkanFunctions &vk = device.get_functions();
+
+			VkBufferImageCopy region = { };
+			region.bufferOffset = src_off;
+			region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+			region.imageSubresource.mipLevel = level;
+			region.imageSubresource.baseArrayLayer = 0;
+			region.imageSubresource.layerCount = 1;
+			region.imageOffset = { x, 0, 0 };
+			region.imageExtent = { wd, 1, 1 };
+			vk.CmdCopyBufferToImage(cmd_buf, staging_buf, handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+		});
+
+	// Copy the caller's data into the staging buffer memory.
+	const char *src = static_cast<const char *>(data);
+	copy(src, src+data_size, static_cast<char *>(staging));
+}
size_t VulkanTexture1D::get_data_size() const
{
protected:
VulkanTexture1D();
+ virtual void fill_image_info(void *) const;
void sub_image(unsigned, int, unsigned, const void *);
public:
+#include "device.h"
+#include "texture2d.h"
#include "texture2d_backend.h"
#include "vulkan.h"
Texture(VK_IMAGE_VIEW_TYPE_2D)
{ }
-void VulkanTexture2D::sub_image(unsigned, int, int, unsigned, unsigned, const void *)
+void VulkanTexture2D::fill_image_info(void *ii) const
{
- throw logic_error("Texture2D::sub_image is unimplemented");
+ const Texture2D &self = *static_cast<const Texture2D *>(this);
+
+ VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+ image_info->imageType = VK_IMAGE_TYPE_2D;
+ image_info->extent.width = self.width;
+ image_info->extent.height = self.height;
+ image_info->mipLevels = self.levels;
+}
+
+void VulkanTexture2D::sub_image(unsigned level, int x, int y, unsigned wd, unsigned ht, const void *data)
+{
+ const Texture2D &self = *static_cast<const Texture2D *>(this);
+
+ auto level_size = self.get_level_size(level);
+ synchronize(-1, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (x==0 && y==0 && wd==level_size.x && ht==level_size.y));
+
+ size_t data_size = wd*ht*get_pixel_size(storage_fmt);
+ void *staging = device.get_transfer_queue().prepare_transfer(data_size,
+ [this, level, x, y, wd, ht](VkCommandBuffer cmd_buf, VkBuffer staging_buf, size_t src_off){
+ const VulkanFunctions &vk = device.get_functions();
+
+ VkBufferImageCopy region = { };
+ region.bufferOffset = src_off;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.mipLevel = level;
+ region.imageSubresource.baseArrayLayer = 0;
+ region.imageSubresource.layerCount = 1;
+ region.imageOffset = { x, y, 0 };
+ region.imageExtent = { wd, ht, 1 };
+ vk.CmdCopyBufferToImage(cmd_buf, staging_buf, handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
+ });
+
+ const char *src = static_cast<const char *>(data);
+ copy(src, src+data_size, static_cast<char *>(staging));
}
Resource::AsyncLoader *VulkanTexture2D::load(IO::Seekable &, const Resources *)
protected:
VulkanTexture2D();
+ virtual void fill_image_info(void *) const;
void sub_image(unsigned, int, int, unsigned, unsigned, const void *);
public:
Texture3D(VK_IMAGE_VIEW_TYPE_2D_ARRAY)
{ }
+/* Fills in the dimension-specific fields of a VkImageCreateInfo.  The array
+depth maps to Vulkan array layers. */
+void VulkanTexture2DArray::fill_image_info(void *ii) const
+{
+	VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+	image_info->imageType = VK_IMAGE_TYPE_2D;
+	image_info->extent.width = width;
+	image_info->extent.height = height;
+	image_info->mipLevels = levels;
+	image_info->arrayLayers = depth;
+}
+
} // namespace GL
} // namespace Msp
{
protected:
VulkanTexture2DArray();
+
+ virtual void fill_image_info(void *) const;
};
using Texture2DArrayBackend = VulkanTexture2DArray;
+#include "frameformat.h"
+#include "texture2dmultisample.h"
#include "texture2dmultisample_backend.h"
#include "vulkan.h"
throw std::logic_error("VulkanTexture2DMultisample is unimplemented");
}
+/* Fills in the dimension-specific fields of a VkImageCreateInfo, including
+the sample count.  Multisample images have a single mip level, so mipLevels
+is left at the caller's default. */
+void VulkanTexture2DMultisample::fill_image_info(void *ii) const
+{
+	const Texture2DMultisample &self = *static_cast<const Texture2DMultisample *>(this);
+
+	VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+	image_info->imageType = VK_IMAGE_TYPE_2D;
+	image_info->extent.width = self.width;
+	image_info->extent.height = self.height;
+	image_info->samples = static_cast<VkSampleCountFlagBits>(get_vulkan_samples(self.samples));
+}
+
size_t VulkanTexture2DMultisample::get_data_size() const
{
return 0;
protected:
VulkanTexture2DMultisample();
+ virtual void fill_image_info(void *) const;
+
public:
virtual AsyncLoader *load(IO::Seekable &, const Resources * = 0) { return 0; }
virtual std::size_t get_data_size() const;
+#include "device.h"
+#include "texture3d.h"
#include "texture3d_backend.h"
#include "vulkan.h"
+using namespace std;
+
namespace Msp {
namespace GL {
VulkanTexture3D::VulkanTexture3D():
Texture(VK_IMAGE_VIEW_TYPE_3D)
-{
- throw std::logic_error("VulkanTexture3D is unimplemented");
-}
+{ }
VulkanTexture3D::VulkanTexture3D(unsigned t):
Texture(t)
{ }
-void VulkanTexture3D::sub_image(unsigned, int, int, int, unsigned, unsigned, unsigned, const void *)
+void VulkanTexture3D::fill_image_info(void *ii) const
+{
+ const Texture3D &self = *static_cast<const Texture3D *>(this);
+
+ VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+ image_info->imageType = VK_IMAGE_TYPE_3D;
+ image_info->extent.width = self.width;
+ image_info->extent.height = self.height;
+ image_info->extent.depth = self.depth;
+ image_info->mipLevels = self.levels;
+}
+
+void VulkanTexture3D::sub_image(unsigned level, int x, int y, int z, unsigned wd, unsigned ht, unsigned dp, const void *data)
{
+ const Texture3D &self = *static_cast<const Texture3D *>(this);
+
+ auto level_size = self.get_level_size(level);
+ int layer = (is_array() && dp==1 ? z : -1);
+ synchronize(layer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (x==0 && y==0 && z==0 && wd==level_size.x && ht==level_size.y && dp==level_size.z));
+
+ size_t data_size = wd*ht*dp*get_pixel_size(storage_fmt);
+ void *staging = device.get_transfer_queue().prepare_transfer(data_size,
+ [this, level, x, y, z, wd, ht, dp](VkCommandBuffer cmd_buf, VkBuffer staging_buf, size_t src_off){
+ const VulkanFunctions &vk = device.get_functions();
+
+ VkBufferImageCopy region = { };
+ region.bufferOffset = src_off;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.mipLevel = level;
+ region.imageSubresource.baseArrayLayer = (is_array() ? z : 0);
+ region.imageSubresource.layerCount = (is_array() ? dp : 1);
+ region.imageOffset = { x, y, (is_array() ? 0 : z) };
+ region.imageExtent = { wd, ht, (is_array() ? 1 : dp) };
+ vk.CmdCopyBufferToImage(cmd_buf, staging_buf, handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
+ });
+
+ const char *src = static_cast<const char *>(data);
+ copy(src, src+data_size, static_cast<char *>(staging));
}
bool VulkanTexture3D::is_array() const
VulkanTexture3D();
VulkanTexture3D(unsigned);
+ virtual void fill_image_info(void *) const;
void sub_image(unsigned, int, int, int, unsigned, unsigned, unsigned, const void *);
bool is_array() const;
#include "device.h"
#include "error.h"
+#include "synchronizer.h"
#include "texture.h"
#include "texture_backend.h"
#include "vulkan.h"
device(other.device),
handle(other.handle),
view_handle(other.view_handle),
+ memory_id(other.memory_id),
view_type(other.view_type),
debug_name(move(other.debug_name))
{
other.handle = 0;
other.view_handle = 0;
+ other.memory_id = 0;
}
VulkanTexture::~VulkanTexture()
{
+ DestroyQueue &dq = device.get_destroy_queue();
+
if(view_handle)
- device.get_destroy_queue().destroy(view_handle);
+ dq.destroy(view_handle);
+ if(handle)
+ dq.destroy(handle, memory_id);
}
void VulkanTexture::allocate()
const Texture &self = *static_cast<const Texture *>(this);
const VulkanFunctions &vk = device.get_functions();
+ VkImageCreateInfo image_info = { };
+ image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+ image_info.format = static_cast<VkFormat>(get_vulkan_pixelformat(self.storage_fmt));
+ image_info.extent.width = 1;
+ image_info.extent.height = 1;
+ image_info.extent.depth = 1;
+ image_info.mipLevels = 1;
+ image_info.arrayLayers = 1;
+ image_info.samples = VK_SAMPLE_COUNT_1_BIT;
+ image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
+ image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+ image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+
+ image_info.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+ PixelComponents comp = get_components(self.storage_fmt);
+ if(comp==DEPTH_COMPONENT || comp==STENCIL_INDEX)
+ image_info.usage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+ else
+ image_info.usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+
+ fill_image_info(&image_info);
+
+ /* SwapChainTexture may have already provided the image. Create_info is
+ filled anyway because some of its fields are used for view_info. */
if(!handle)
- throw logic_error("Texture image allocation is unimplemented");
+ {
+ vk.CreateImage(image_info, handle);
+ memory_id = device.get_allocator().allocate(handle, DEVICE_MEMORY);
+
+ // Trigger a layout transition if the image is used before uploading data.
+ synchronize(-1, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, true);
+ }
VkImageViewCreateInfo view_info = { };
view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
view_info.image = handle_cast<::VkImage>(handle);
view_info.viewType = static_cast<VkImageViewType>(view_type);
- view_info.format = static_cast<VkFormat>(get_vulkan_pixelformat(self.storage_fmt));
+ view_info.format = image_info.format;
const unsigned *swizzle_order = get_vulkan_swizzle(self.swizzle);
view_info.components.r = static_cast<VkComponentSwizzle>(swizzle_order[0]);
view_info.subresourceRange.aspectMask = get_vulkan_aspect(get_components(self.storage_fmt));
view_info.subresourceRange.baseMipLevel = 0;
- view_info.subresourceRange.levelCount = 1;
+ view_info.subresourceRange.levelCount = image_info.mipLevels;
view_info.subresourceRange.baseArrayLayer = 0;
- view_info.subresourceRange.layerCount = 1;
+ view_info.subresourceRange.layerCount = image_info.arrayLayers;
vk.CreateImageView(view_info, view_handle);
throw logic_error("VulkanTexture::generate_mipmap is unimplemented");
}
+/* Declares an upcoming access to this texture with the device's
+synchronizer.  layer selects an array layer (-1 for all), layout is the
+required VkImageLayout and discard indicates the previous contents may be
+dropped. */
+void VulkanTexture::synchronize(int layer, unsigned layout, bool discard) const
+{
+	unsigned aspect = get_vulkan_aspect(get_components(static_cast<const Texture *>(this)->storage_fmt));
+	device.get_synchronizer().access(handle, aspect, layer, layout, discard);
+}
+
void VulkanTexture::set_debug_name(const string &name)
{
#ifdef DEBUG
#ifdef DEBUG
const VulkanFunctions &vk = device.get_functions();
- string view_name = debug_name+"/view";
VkDebugUtilsObjectNameInfoEXT name_info = { };
name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+ name_info.objectType = VK_OBJECT_TYPE_IMAGE;
+ name_info.objectHandle = reinterpret_cast<uint64_t>(handle);
+ name_info.pObjectName = debug_name.c_str();
+ vk.SetDebugUtilsObjectName(name_info);
+
+ string view_name = debug_name+"/view";
name_info.objectType = VK_OBJECT_TYPE_IMAGE_VIEW;
name_info.objectHandle = reinterpret_cast<uint64_t>(view_handle);
name_info.pObjectName = view_name.c_str();
Device &device;
VkImage handle = 0;
VkImageView view_handle = 0;
+ unsigned memory_id = 0;
unsigned view_type;
std::string debug_name;
~VulkanTexture();
void allocate();
+ virtual void fill_image_info(void *) const = 0;
void require_swizzle() { }
void generate_mipmap();
+ void synchronize(int, unsigned, bool = false) const;
+
void set_debug_name(const std::string &);
void set_vulkan_object_names() const;
};
+#include "device.h"
+#include "texturecube.h"
#include "texturecube_backend.h"
#include "vulkan.h"
VulkanTextureCube::VulkanTextureCube():
Texture(VK_IMAGE_VIEW_TYPE_CUBE)
+{ }
+
+/* Fills in the dimension-specific fields of a VkImageCreateInfo.  Cube maps
+are created as 2D images with six array layers and the cube-compatible flag
+so a cube image view can be created on them. */
+void VulkanTextureCube::fill_image_info(void *ii) const
{
-	throw std::logic_error("VulkanTextureCube is unimplemented");
+	const TextureCube &self = *static_cast<const TextureCube *>(this);
+
+	VkImageCreateInfo *image_info = static_cast<VkImageCreateInfo *>(ii);
+	image_info->flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+	image_info->imageType = VK_IMAGE_TYPE_2D;
+	image_info->extent.width = self.size;
+	image_info->extent.height = self.size;
+	image_info->mipLevels = self.levels;
+	image_info->arrayLayers = 6;
}
-void VulkanTextureCube::sub_image(unsigned, unsigned, int, int, unsigned, unsigned, const void *)
+void VulkanTextureCube::sub_image(unsigned face, unsigned level, int x, int y, unsigned wd, unsigned ht, const void *data)
{
+ const TextureCube &self = *static_cast<const TextureCube *>(this);
+
+ unsigned level_size = self.get_level_size(level);
+ synchronize(face, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (x==0 && y==0 && wd==level_size && ht==level_size));
+
+ size_t data_size = wd*ht*get_pixel_size(storage_fmt);
+ void *staging = device.get_transfer_queue().prepare_transfer(data_size,
+ [this, face, level, x, y, wd, ht](VkCommandBuffer cmd_buf, VkBuffer staging_buf, size_t src_off){
+ const VulkanFunctions &vk = device.get_functions();
+
+ VkBufferImageCopy region = { };
+ region.bufferOffset = src_off;
+ region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ region.imageSubresource.mipLevel = level;
+ region.imageSubresource.baseArrayLayer = face;
+ region.imageSubresource.layerCount = 1;
+ region.imageOffset = { x, y, 0 };
+ region.imageExtent = { wd, ht, 1 };
+ vk.CmdCopyBufferToImage(cmd_buf, staging_buf, handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
+ });
+
+ const char *src = static_cast<const char *>(data);
+ copy(src, src+data_size, static_cast<char *>(staging));
}
size_t VulkanTextureCube::get_data_size() const
protected:
VulkanTextureCube();
+ virtual void fill_image_info(void *) const;
void sub_image(unsigned, unsigned, int, int, unsigned, unsigned, const void *);
public:
// 12
vkCreateBuffer(context.get_function<PFN_vkCreateBuffer>("vkCreateBuffer")),
vkDestroyBuffer(context.get_function<PFN_vkDestroyBuffer>("vkDestroyBuffer")),
+ vkCreateImage(context.get_function<PFN_vkCreateImage>("vkCreateImage")),
+ vkDestroyImage(context.get_function<PFN_vkDestroyImage>("vkDestroyImage")),
vkCreateImageView(context.get_function<PFN_vkCreateImageView>("vkCreateImageView")),
vkDestroyImageView(context.get_function<PFN_vkDestroyImageView>("vkDestroyImageView")),
vkGetBufferMemoryRequirements(context.get_function<PFN_vkGetBufferMemoryRequirements>("vkGetBufferMemoryRequirements")),
+ vkGetImageMemoryRequirements(context.get_function<PFN_vkGetImageMemoryRequirements>("vkGetImageMemoryRequirements")),
vkBindBufferMemory(context.get_function<PFN_vkBindBufferMemory>("vkBindBufferMemory")),
+ vkBindImageMemory(context.get_function<PFN_vkBindImageMemory>("vkBindImageMemory")),
+ // 13
+ vkCreateSampler(context.get_function<PFN_vkCreateSampler>("vkCreateSampler")),
+ vkDestroySampler(context.get_function<PFN_vkDestroySampler>("vkDestroySampler")),
// 14
vkCreateDescriptorSetLayout(context.get_function<PFN_vkCreateDescriptorSetLayout>("vkCreateDescriptorSetLayout")),
vkDestroyDescriptorSetLayout(context.get_function<PFN_vkDestroyDescriptorSetLayout>("vkDestroyDescriptorSetLayout")),
vkCmdPushConstants(context.get_function<PFN_vkCmdPushConstants>("vkCmdPushConstants")),
// 19
vkCmdCopyBuffer(context.get_function<PFN_vkCmdCopyBuffer>("vkCmdCopyBuffer")),
+ vkCmdCopyBufferToImage(context.get_function<PFN_vkCmdCopyBufferToImage>("vkCmdCopyBufferToImage")),
// 20
vkCmdBindIndexBuffer(context.get_function<PFN_vkCmdBindIndexBuffer>("vkCmdBindIndexBuffer")),
vkCmdDrawIndexed(context.get_function<PFN_vkCmdDrawIndexed>("vkCmdDrawIndexed")),
PFN_vkUnmapMemory vkUnmapMemory = 0; // 11.2.9
PFN_vkCreateBuffer vkCreateBuffer = 0; // 12.1
PFN_vkDestroyBuffer vkDestroyBuffer = 0; // 12.1
+ PFN_vkCreateImage vkCreateImage = 0; // 12.3
+ PFN_vkDestroyImage vkDestroyImage = 0; // 12.3
PFN_vkCreateImageView vkCreateImageView = 0; // 12.5
PFN_vkDestroyImageView vkDestroyImageView = 0; // 12.5
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; // 12.7
+ PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; // 12.7
PFN_vkBindBufferMemory vkBindBufferMemory = 0; // 12.7
+ PFN_vkBindImageMemory vkBindImageMemory = 0; // 12.7
+ PFN_vkCreateSampler vkCreateSampler = 0; // 13
+ PFN_vkDestroySampler vkDestroySampler = 0; // 13
PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; // 14.2.1
PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; // 14.2.1
PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; // 14.2.2
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; // 14.2.7
PFN_vkCmdPushConstants vkCmdPushConstants = 0; // 14.2.10
PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; // 19.2
+ PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; // 19.4
PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; // 20.3
PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; // 20.3
PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; // 21.2
void DestroyBuffer(VkBuffer image) const
{ vkDestroyBuffer(device, handle_cast<::VkBuffer>(image), 0); }
+ Result CreateImage(const VkImageCreateInfo &rCreateInfo, VkImage &rImage) const
+ { return { vkCreateImage(device, &rCreateInfo, 0, handle_cast<::VkImage *>(&rImage)), "vkCreateImage" }; }
+
+ void DestroyImage(VkImage image) const
+ { vkDestroyImage(device, handle_cast<::VkImage>(image), 0); }
+
Result CreateImageView(const VkImageViewCreateInfo &rCreateInfo, VkImageView &rView) const
{ return { vkCreateImageView(device, &rCreateInfo, 0, handle_cast<::VkImageView *>(&rView)), "vkCreateImageView" }; }
void GetBufferMemoryRequirements(VkBuffer image, VkMemoryRequirements &rMemoryRequirements) const
{ vkGetBufferMemoryRequirements(device, handle_cast<::VkBuffer>(image), &rMemoryRequirements); }
+ void GetImageMemoryRequirements(VkImage image, VkMemoryRequirements &rMemoryRequirements) const
+ { vkGetImageMemoryRequirements(device, handle_cast<::VkImage>(image), &rMemoryRequirements); }
+
Result BindBufferMemory(VkBuffer image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const
{ return { vkBindBufferMemory(device, handle_cast<::VkBuffer>(image), handle_cast<::VkDeviceMemory>(memory), memoryOffset), "vkBindBufferMemory" }; }
+ Result BindImageMemory(VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const
+ { return { vkBindImageMemory(device, handle_cast<::VkImage>(image), handle_cast<::VkDeviceMemory>(memory), memoryOffset), "vkBindImageMemory" }; }
+
+ // Chapter 13: Samplers
+ Result CreateSampler(const VkSamplerCreateInfo &rCreateInfo, VkSampler &rSampler) const
+ { return { vkCreateSampler(device, &rCreateInfo, 0, handle_cast<::VkSampler *>(&rSampler)), "vkCreateSampler" }; }
+
+ void DestroySampler(VkSampler sampler) const
+ { vkDestroySampler(device, handle_cast<::VkSampler>(sampler), 0); }
+
// Chapter 14: Resource Descriptors
Result CreateDescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo &rCreateInfo, VkDescriptorSetLayout &rSetLayout) const
{ return { vkCreateDescriptorSetLayout(device, &rCreateInfo, 0, handle_cast<::VkDescriptorSetLayout *>(&rSetLayout)), "vkCreateDescriptorSetLayout" }; }
void CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, std::uint32_t regionCount, const VkBufferCopy *pRegions) const
{ vkCmdCopyBuffer(handle_cast<::VkCommandBuffer>(commandBuffer), handle_cast<::VkBuffer>(srcBuffer), handle_cast<::VkBuffer>(dstBuffer), regionCount, pRegions); }
+ void CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, std::uint32_t regionCount, const VkBufferImageCopy *pRegions) const
+ { vkCmdCopyBufferToImage(handle_cast<::VkCommandBuffer>(commandBuffer), handle_cast<::VkBuffer>(srcBuffer), handle_cast<::VkImage>(dstImage), dstImageLayout, regionCount, pRegions); }
+
// Chapter 20: Drawing Commands
void CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const
{ vkCmdBindIndexBuffer(handle_cast<::VkCommandBuffer>(commandBuffer), handle_cast<::VkBuffer>(buffer), offset, indexType); }