Descriptor sets have been moved from PipelineCache to a dedicated class.
PipelineState refers to descriptor sets using slot indices, allowing the
descriptor pool to be recreated if more space is needed.
Dynamic uniform buffers are no longer used. Instead, if a descriptor set
contains dynamic uniform blocks, a separate copy of it is created for
each frame in flight.
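
As a rough sketch (not part of this patch), this is the interface the new
DescriptorPool class is assumed to expose, inferred from the call sites in
the hunks below; the member layout and comments are illustrative only:

class DescriptorPool
{
private:
	Device &device;
	VkDescriptorPool pool;                          // recreated via the destroy queue when it runs out of space
	std::vector<VkDescriptorSet> descriptor_sets;   // indexed by slot; dynamic sets get one entry per frame in flight

public:
	DescriptorPool(Device &);

	// Called at the start of each frame (see VulkanCommands below).
	void begin_frame();

	// Reserves a slot for set `index` of the pipeline state.  Callers keep only
	// the returned index, so the underlying VkDescriptorPool can be recreated
	// with more space without invalidating them.
	unsigned get_descriptor_set_slot(const PipelineState &, unsigned index);

	// Resolves a slot to a VkDescriptorSet.  For sets reported dynamic by
	// PipelineState::is_descriptor_set_dynamic, a separate copy exists for each
	// frame in flight and `frame` selects between them.
	VkDescriptorSet get_descriptor_set(unsigned slot, const PipelineState &, unsigned index, unsigned frame);
};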
current_pool->primary.next_buffer = 0;
current_pool->secondary.next_buffer = 0;
}
+
+ device.get_descriptor_pool().begin_frame();
}
void VulkanCommands::submit_frame(Semaphore *wait_sem, Semaphore *signal_sem)
destroy<VkBuffer, &VulkanFunctions::DestroyBuffer>(handle, mem_id);
}
+void DestroyQueue::destroy(VkDescriptorPool handle)
+{
+ destroy<VkDescriptorPool, &VulkanFunctions::DestroyDescriptorPool>(handle);
+}
+
void DestroyQueue::destroy(VkFence handle)
{
destroy<VkFence, &VulkanFunctions::DestroyFence>(handle);
~DestroyQueue();
void destroy(VkBuffer, unsigned);
+ void destroy(VkDescriptorPool);
void destroy(VkFence);
void destroy(VkFramebuffer);
void destroy(VkImage, unsigned);
destroy_queue(*static_cast<Device *>(this)),
synchronizer(*static_cast<Device *>(this)),
transfer_queue(*static_cast<Device *>(this)),
- pipeline_cache(*static_cast<Device *>(this))
+ pipeline_cache(*static_cast<Device *>(this)),
+ descriptor_pool(*static_cast<Device *>(this))
{ }
// Cause the destructor of RefPtr<VulkanFunctions> to be emitted here
#include <msp/core/noncopyable.h>
#include <msp/graphics/vulkancontext.h>
+#include "descriptorpool.h"
#include "destroyqueue.h"
#include "handles.h"
#include "memoryallocator.h"
Synchronizer synchronizer;
TransferQueue transfer_queue;
PipelineCache pipeline_cache;
+ DescriptorPool descriptor_pool;
unsigned n_frames_in_flight = 3;
VulkanDevice(Graphics::Window &, const Graphics::VulkanOptions &);
Synchronizer &get_synchronizer() { return synchronizer; }
TransferQueue &get_transfer_queue() { return transfer_queue; }
PipelineCache &get_pipeline_cache() { return pipeline_cache; }
+ DescriptorPool &get_descriptor_pool() { return descriptor_pool; }
unsigned get_n_frames_in_flight() const { return n_frames_in_flight; }
};
PipelineCache::PipelineCache(Device &d):
device(d)
-{
- const VulkanFunctions &vk = device.get_functions();
-
- VkDescriptorPoolSize pool_sizes[2] = { };
- pool_sizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
- pool_sizes[0].descriptorCount = 10000;
- pool_sizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- pool_sizes[1].descriptorCount = 10000;
-
- VkDescriptorPoolCreateInfo pool_info = { };
- pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- pool_info.maxSets = 10000;
- pool_info.poolSizeCount = 2;
- pool_info.pPoolSizes = pool_sizes;
-
- vk.CreateDescriptorPool(pool_info, descriptor_pool);
-}
+{ }
PipelineCache::~PipelineCache()
{
vk.DestroyRenderPass(kvp.second);
for(const auto &kvp: pipelines)
vk.DestroyPipeline(kvp.second);
- vk.DestroyDescriptorPool(descriptor_pool);
}
VkRenderPass PipelineCache::get_render_pass(const FrameFormat &format, bool clear, bool discard, bool to_present)
return pipeline;
}
-VkDescriptorSet PipelineCache::get_descriptor_set(const PipelineState &ps, unsigned index)
-{
- const VulkanFunctions &vk = device.get_functions();
-
- uint64_t key = ps.compute_descriptor_set_hash(index);
- auto i = descriptor_sets.find(key);
- if(i!=descriptor_sets.end())
- return i->second;
-
- VkDescriptorSetLayout layout = ps.get_descriptor_set_layout(index);
-
- VkDescriptorSetAllocateInfo alloc_info = { };
- alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
- alloc_info.descriptorPool = handle_cast<::VkDescriptorPool>(descriptor_pool);
- alloc_info.descriptorSetCount = 1;
- alloc_info.pSetLayouts = handle_cast<::VkDescriptorSetLayout *>(&layout);
-
- VkDescriptorSet desc_set;
- vk.AllocateDescriptorSets(alloc_info, &desc_set);
-
- vector<char> buffer;
- unsigned n_writes = ps.fill_descriptor_writes(index, buffer);
- VkWriteDescriptorSet *writes = reinterpret_cast<VkWriteDescriptorSet *>(buffer.data());
- for(unsigned j=0; j<n_writes; ++j)
- writes[j].dstSet = handle_cast<::VkDescriptorSet>(desc_set);
-
- vk.UpdateDescriptorSets(n_writes, writes, 0, 0);
-
- descriptor_sets.insert(make_pair(key, desc_set));
-
- return desc_set;
-}
-
} // namespace GL
} // namespace Msp
{
private:
Device &device;
- VkDescriptorPool descriptor_pool;
std::map<std::uint64_t, VkRenderPass> render_passes;
std::map<std::uint64_t, VkPipeline> pipelines;
- std::map<std::uint64_t, VkDescriptorSet> descriptor_sets;
public:
PipelineCache(Device &);
VkRenderPass get_render_pass(const FrameFormat &, bool, bool, bool);
VkPipeline get_pipeline(const PipelineState &);
- VkDescriptorSet get_descriptor_set(const PipelineState &, unsigned);
};
} // namespace GL
if(changed_sets)
{
- descriptor_set_handles.resize(self.shprog->get_n_descriptor_sets());
- for(unsigned i=0; i<descriptor_set_handles.size(); ++i)
+ descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
+ for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
if(changed_sets&(1<<i))
- descriptor_set_handles[i] = device.get_pipeline_cache().get_descriptor_set(self, i);
+ descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
unapplied |= PipelineState::UNIFORMS;
}
}
return result;
}
+bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ // Bindings pack the descriptor set index in the bits above 20; scan the uniform
+ // blocks belonging to this set for any that are backed by a streaming buffer.
+ auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+ for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used && i->buffer && i->buffer->get_usage()==STREAMING)
+ return true;
+
+ return false;
+}
+
VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
{
return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
}
-unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, vector<char> &buffer) const
+unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
{
buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
buffer_ptr->offset = i->block->get_offset();
+ // Streaming buffers hold one copy of their data per frame in flight; offset to this frame's copy.
+ if(i->buffer->get_usage()==STREAMING)
+ buffer_ptr->offset += frame*i->buffer->get_size();
buffer_ptr->range = i->block->get_data_size();
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
- write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
write_ptr->pBufferInfo = buffer_ptr;
++buffer_ptr;
unapplied |= PipelineState::SHPROG;
if(self.vertex_setup!=last_ps.vertex_setup)
unapplied |= PipelineState::VERTEX_SETUP;
- for(unsigned i=0; (i<descriptor_set_handles.size() && i<last->descriptor_set_handles.size()); ++i)
- if(descriptor_set_handles[i]!=last->descriptor_set_handles[i])
+ for(unsigned i=0; (i<descriptor_set_slots.size() && i<last->descriptor_set_slots.size()); ++i)
+ if(descriptor_set_slots[i]!=last->descriptor_set_slots[i])
{
unapplied |= PipelineState::UNIFORMS;
break;
}
}
- if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_handles.empty())
+ if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_slots.empty())
{
- vector<uint32_t> dynamic_offsets;
- dynamic_offsets.reserve(self.uniform_blocks.size());
- for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
- if(u.used && u.binding>=0)
- {
- if(u.buffer->get_usage()==STREAMING)
- dynamic_offsets.push_back(frame*u.buffer->get_size());
- else
- dynamic_offsets.push_back(0);
- }
+ // Resolve slot indices to this frame's descriptor set handles before binding.
+ vector<VkDescriptorSet> descriptor_set_handles;
+ descriptor_set_handles.reserve(self.descriptor_set_slots.size());
+ for(unsigned i=0; i<self.descriptor_set_slots.size(); ++i)
+ descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(self.descriptor_set_slots[i], self, i, frame));
vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
- 0, descriptor_set_handles.size(), descriptor_set_handles.data(), dynamic_offsets.size(), dynamic_offsets.data());
+ 0, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
}
if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
class VulkanPipelineState: public NonCopyable
{
+ friend class DescriptorPool;
friend class PipelineCache;
friend class VulkanCommands;
mutable unsigned changes = 0;
mutable unsigned unapplied = 0;
mutable VkPipeline handle;
- mutable std::vector<VkDescriptorSet> descriptor_set_handles;
+ mutable std::vector<unsigned> descriptor_set_slots;
VulkanPipelineState();
VulkanPipelineState(VulkanPipelineState &&);
std::uint64_t compute_hash() const;
void fill_creation_info(std::vector<char> &) const;
std::uint64_t compute_descriptor_set_hash(unsigned) const;
+ bool is_descriptor_set_dynamic(unsigned) const;
VkDescriptorSetLayout get_descriptor_set_layout(unsigned) const;
- unsigned fill_descriptor_writes(unsigned, std::vector<char> &) const;
+ unsigned fill_descriptor_writes(unsigned, unsigned, std::vector<char> &) const;
void apply(VkCommandBuffer, const VulkanPipelineState *, unsigned, bool) const;
};
bindings.emplace_back();
VkDescriptorSetLayoutBinding &binding = bindings.back();
binding.binding = b.bind_point;
- binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
binding.descriptorCount = 1;
binding.stageFlags = VK_SHADER_STAGE_ALL;
binding.pImmutableSamplers = 0;