X-Git-Url: http://git.tdb.fi/?a=blobdiff_plain;f=source%2Fbackends%2Fvulkan%2Fpipelinestate_backend.cpp;h=d14bc00c6539bb3c9509fab26abf4610cd151d1f;hb=c737d292d49d2d39b44e1cd903c1f8b79174f0d7;hp=e43215091300a25bf4c1f01d5ce8260c0b8df099;hpb=9c62dbb21e979d23c2e7cf0aff3746402718c7b2;p=libs%2Fgl.git

diff --git a/source/backends/vulkan/pipelinestate_backend.cpp b/source/backends/vulkan/pipelinestate_backend.cpp
index e4321509..d14bc00c 100644
--- a/source/backends/vulkan/pipelinestate_backend.cpp
+++ b/source/backends/vulkan/pipelinestate_backend.cpp
@@ -1,3 +1,4 @@
+#include <msp/core/algorithm.h>
 #include <msp/core/hash.h>
 #include "batch.h"
 #include "blend.h"
@@ -40,6 +41,12 @@ void VulkanPipelineState::update() const
 	if(changes&PipelineState::VERTEX_SETUP)
 		self.vertex_setup->refresh();
 
+	if(changes&PipelineState::SHPROG)
+	{
+		push_const_compat = hash<32>(self.shprog->stage_flags);
+		push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
+	}
+
 	constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
 		PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
 	if(changes&pipeline_mask)
@@ -75,10 +82,15 @@ void VulkanPipelineState::update() const
 	if(changed_sets)
 	{
-		descriptor_set_handles.resize(self.shprog->get_n_descriptor_sets());
-		for(unsigned i=0; i<descriptor_set_handles.size(); ++i)
+		descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
+		first_changed_desc_set = descriptor_set_slots.size();
+		for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
...
 	const PipelineState &self = *static_cast<const PipelineState *>(this);
 	uint64_t result = hash<64>(0, 0);
-	for(const PipelineState::BoundUniformBlock &b: self.uniform_blocks)
-		if(b.used && b.binding>=0 && static_cast<unsigned>(b.binding>>20)==index)
+	bool empty = true;
+
+	auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+	for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+		if(i->used)
 		{
-			result = hash_update<64>(result, b.binding);
-			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(b.block));
-			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(b.buffer->handle));
+			result = hash_update<64>(result, i->binding);
+			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
+			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
+			empty = false;
 		}
-	for(const PipelineState::BoundTexture &t: self.textures)
-		if(t.used && (t.binding>>20)==index)
+
+	auto j = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
+	for(; (j!=self.textures.end() && j->binding>>20==index); ++j)
+		if(j->used)
 		{
-			result = hash_update<64>(result, t.binding);
-			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.texture->handle));
-			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.sampler->handle));
-			result = hash_update<64>(result, t.level);
+			result = hash_update<64>(result, j->binding);
+			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->texture->handle));
+			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->sampler->handle));
+			result = hash_update<64>(result, j->level);
+			empty = false;
 		}
 
+	if(!empty)
+		result = hash_update<64>(result, self.shprog->stage_flags);
+
 	return result;
 }
 
+bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
+{
+	const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+	auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+	for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+		if(i->used && i->buffer && i->buffer->get_usage()==STREAMING)
+			return true;
+
+	return false;
+}
+
 VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
 {
 	return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
 }
 
-unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, vector<char> &buffer) const
+unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
 {
 	const PipelineState &self = *static_cast<const PipelineState *>(this);
 
+	auto u_begin = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+	auto t_begin = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
+
 	unsigned n_buffers = 0;
-	for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
-		if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
+	for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+		if(i->used)
 			++n_buffers;
 
 	unsigned n_images = 0;
-	for(const PipelineState::BoundTexture &t: self.textures)
-		if(t.used && (t.binding>>20)==index)
+	for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
+		if(i->used)
 			++n_images;
 
 	unsigned n_writes = n_buffers+n_images;
@@ -307,35 +344,37 @@ unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, vector
...
-	for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
-		if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
+	for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+		if(i->used)
 		{
-			buffer_ptr->buffer = handle_cast<::VkBuffer>(u.buffer->handle);
-			buffer_ptr->offset = u.block->get_offset();
-			buffer_ptr->range = u.block->get_data_size();
+			buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
+			buffer_ptr->offset = i->block->get_offset();
+			if(i->buffer->get_usage()==STREAMING)
+				buffer_ptr->offset += frame*i->buffer->get_size();
+			buffer_ptr->range = i->block->get_data_size();
 
 			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-			write_ptr->dstBinding = u.binding&0xFFFFF;
+			write_ptr->dstBinding = i->binding&0xFFFFF;
 			write_ptr->descriptorCount = 1;
-			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
 			write_ptr->pBufferInfo = buffer_ptr;
 
 			++buffer_ptr;
 			++write_ptr;
 		}
 
-	for(const PipelineState::BoundTexture &t: self.textures)
-		if(t.used && (t.binding>>20)==index)
+	for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
+		if(i->used)
 		{
-			image_ptr->sampler = handle_cast<::VkSampler>(t.sampler->handle);
-			if(t.level<0)
-				image_ptr->imageView = handle_cast<::VkImageView>(t.texture->view_handle);
+			image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
+			if(i->level<0)
+				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
 			else
-				image_ptr->imageView = handle_cast<::VkImageView>(t.texture->mip_view_handles[t.level]);
+				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->level]);
 			image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
 
 			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-			write_ptr->dstBinding = t.binding&0xFFFFF;
+			write_ptr->dstBinding = i->binding&0xFFFFF;
 			write_ptr->descriptorCount = 1;
 			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
 			write_ptr->pImageInfo = image_ptr;
@@ -353,18 +392,29 @@ void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipe
 	const VulkanFunctions &vk = device.get_functions();
 
 	if(!last)
+	{
 		unapplied = ~0U;
+		first_changed_desc_set = 0;
+	}
 	else if(last!=this)
 	{
 		const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
 		if(handle!=last->handle)
+		{
 			unapplied |= PipelineState::SHPROG;
+			if(self.push_const_compat!=last_ps.push_const_compat)
+			{
+				unapplied |= PipelineState::UNIFORMS;
+				first_changed_desc_set = 0;
+			}
+		}
 		if(self.vertex_setup!=last_ps.vertex_setup)
 			unapplied |= PipelineState::VERTEX_SETUP;
-		for(unsigned i=0; (i<descriptor_set_handles.size() && i<last->descriptor_set_handles.size()); ++i)
-			if(descriptor_set_handles[i]!=last->descriptor_set_handles[i])
+		for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
+			if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
 			{
 				unapplied |= PipelineState::UNIFORMS;
+				first_changed_desc_set = min(first_changed_desc_set, i);
 				break;
 			}
 		if(self.viewport!=last_ps.viewport)
@@ -395,21 +445,16 @@ void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipe
 		}
 	}
 
-	if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_handles.empty())
+	if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_slots.empty())
 	{
-		vector<uint32_t> dynamic_offsets;
-		dynamic_offsets.reserve(self.uniform_blocks.size());
-		for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
-			if(u.used && u.binding>=0)
-			{
-				if(u.buffer->get_usage()==STREAMING)
-					dynamic_offsets.push_back(frame*u.buffer->get_size());
-				else
-					dynamic_offsets.push_back(0);
-			}
+		vector<VkDescriptorSet> descriptor_set_handles;
+		descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
+		for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
+			...
 
 		vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
-			0, descriptor_set_handles.size(), descriptor_set_handles.data(), dynamic_offsets.size(), dynamic_offsets.data());
+			first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
 	}
 
 	if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
@@ -447,6 +492,7 @@ void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipe
 	}
 
 	unapplied = 0;
+	first_changed_desc_set = descriptor_set_slots.size();
 }
 
 } // namespace GL
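The recurring pattern in this diff is the packed binding value: a bound resource's binding field keeps the descriptor set index in the bits above 20 and the binding number within that set in the low 20 bits, which is why the code selects a set with binding>>20 and fills dstBinding with binding&0xFFFFF, and why the per-set loops could be rewritten from full scans into lower_bound_member range scans over bindings that the lower_bound usage implies are kept sorted by that packed value. Below is a minimal standalone sketch of the same idea using plain std::lower_bound; the Binding struct, the bindings_for_set() helper and the sample data are hypothetical illustrations, not code from libs/gl.

// Standalone illustration (not from libs/gl): the set/binding packing used in the
// diff, where set = binding>>20 and the in-set binding = binding&0xFFFFF, plus a
// lower_bound lookup over a vector kept sorted by the packed value.
// Binding, bindings_for_set() and the sample data below are hypothetical.
#include <algorithm>
#include <iostream>
#include <utility>
#include <vector>

struct Binding
{
	int binding;    // packed as (set<<20) | binding_within_set
	bool used;
};

// Return the contiguous range of entries belonging to one descriptor set,
// assuming the vector is sorted by the packed binding value.
std::pair<std::vector<Binding>::const_iterator, std::vector<Binding>::const_iterator>
bindings_for_set(const std::vector<Binding> &bindings, unsigned set)
{
	auto begin = std::lower_bound(bindings.begin(), bindings.end(), static_cast<int>(set)<<20,
		[](const Binding &b, int key){ return b.binding<key; });
	auto end = begin;
	while(end!=bindings.end() && static_cast<unsigned>(end->binding)>>20==set)
		++end;
	return { begin, end };
}

int main()
{
	std::vector<Binding> bindings =
	{
		{ (0<<20)|0, true }, { (0<<20)|1, false }, { (1<<20)|0, true }, { (1<<20)|3, true }
	};

	auto range = bindings_for_set(bindings, 1);
	for(auto i=range.first; i!=range.second; ++i)
		if(i->used)
			std::cout << "set " << (i->binding>>20) << ", binding " << (i->binding&0xFFFFF) << '\n';
	// Prints:
	// set 1, binding 0
	// set 1, binding 3
}

Keeping the bindings sorted this way means each descriptor set only has to walk its own contiguous slice instead of every bound resource, which is what makes the per-set hashing and descriptor-write filling in the diff cheap.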