#include "pipelinestate_backend.h"
#include "program.h"
#include "rect.h"
+#include "renderpass.h"
#include "sampler.h"
#include "stenciltest.h"
#include "structurebuilder.h"
if(changes&PipelineState::VERTEX_SETUP)
self.vertex_setup->refresh();
- constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
+ if(changes&PipelineState::SHPROG)
+ {
+ push_const_compat = hash<32>(self.shprog->stage_flags);
+ push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
+ }
+
+ constexpr unsigned graphics_mask = PipelineState::FRAMEBUFFER|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
+ unsigned pipeline_mask = PipelineState::SHPROG;
+ if(!self.shprog->is_compute())
+ pipeline_mask |= graphics_mask;
if(changes&pipeline_mask)
{
handle = device.get_pipeline_cache().get_pipeline(self);
unapplied |= PipelineState::SHPROG;
}
- if(changes&(PipelineState::SHPROG|PipelineState::UNIFORMS|PipelineState::TEXTURES))
+ if(changes&(PipelineState::SHPROG|PipelineState::RESOURCES))
{
unsigned changed_sets = (changes&PipelineState::SHPROG ? ~0U : 0U);
- for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
- if(u.changed || changed_sets==~0U)
+ for(const PipelineState::BoundResource &r: self.resources)
+ if(r.changed || changed_sets==~0U)
{
- if(u.block)
- u.used = self.shprog->uses_uniform_block_binding(u.binding);
- if(u.binding>=0)
- changed_sets |= 1<<(u.binding>>20);
- u.changed = false;
- }
- for(const PipelineState::BoundTexture &t: self.textures)
- if(t.changed || changed_sets==~0U)
- {
- if(t.texture && t.sampler)
- t.used = self.shprog->uses_texture_binding(t.binding);
- changed_sets |= 1<<(t.binding>>20);
- if(t.texture && t.level>=0)
- t.texture->refresh_mip_views();
- if(t.sampler)
- t.sampler->refresh();
- t.changed = false;
+ if(r.type==PipelineState::UNIFORM_BLOCK)
+ r.used = self.shprog->uses_uniform_block_binding(r.binding);
+ else if(r.type==PipelineState::SAMPLED_TEXTURE || r.type==PipelineState::STORAGE_TEXTURE)
+ {
+ r.used = self.shprog->uses_texture_binding(r.binding);
+ if(r.mip_level>=0)
+ r.texture->refresh_mip_views();
+ if(r.type==PipelineState::SAMPLED_TEXTURE)
+ r.sampler->refresh();
+ }
+ if(r.binding>=0)
+ changed_sets |= 1<<(r.binding>>20);
+ r.changed = false;
}
if(changed_sets)
{
- descriptor_set_handles.resize(self.shprog->get_n_descriptor_sets());
- for(unsigned i=0; i<descriptor_set_handles.size(); ++i)
+ descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
+ first_changed_desc_set = descriptor_set_slots.size();
+ for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
if(changed_sets&(1<<i))
- descriptor_set_handles[i] = device.get_pipeline_cache().get_descriptor_set(self, i);
- unapplied |= PipelineState::UNIFORMS;
+ {
+ descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
+ first_changed_desc_set = min(first_changed_desc_set, i);
+ }
+
+ unapplied |= PipelineState::RESOURCES;
}
}
uint64_t VulkanPipelineState::compute_hash() const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
- const FrameFormat &format = self.framebuffer->get_format();
uint64_t result = hash<64>(self.shprog);
- result = hash_update<64>(result, self.vertex_setup->compute_hash());
- result = hash_round<64>(result, self.primitive_type);
- if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
+ if(!self.shprog->is_compute())
{
- result = hash_round<64>(result, self.front_face);
- result = hash_round<64>(result, self.face_cull);
- }
+ const FrameFormat &format = self.framebuffer->get_format();
- result = hash_round<64>(result, format.get_samples());
+ result = hash_update<64>(result, self.vertex_setup->compute_hash());
+ result = hash_round<64>(result, self.primitive_type);
- if(self.depth_test.enabled)
- {
- result = hash_round<64>(result, self.depth_test.compare);
- result = hash_update<64>(result, self.depth_test.write);
- }
+ if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
+ {
+ result = hash_round<64>(result, self.front_face);
+ result = hash_round<64>(result, self.face_cull);
+ }
- if(self.stencil_test.enabled)
- {
- result = hash_round<64>(result, self.stencil_test.compare);
- result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
- result = hash_round<64>(result, self.stencil_test.depth_fail_op);
- result = hash_round<64>(result, self.stencil_test.depth_pass_op);
- result = hash_update<64>(result, self.stencil_test.reference);
- }
+ result = hash_round<64>(result, format.get_samples());
+ if(format.get_samples()>1)
+ result = hash_round<64>(result, self.blend.alpha_to_coverage);
- if(self.blend.enabled)
- {
- result = hash_round<64>(result, self.blend.equation);
- result = hash_round<64>(result, self.blend.src_factor);
- result = hash_round<64>(result, self.blend.dst_factor);
- result = hash_round<64>(result, self.blend.write_mask);
- }
+ if(self.depth_test.enabled)
+ {
+ result = hash_round<64>(result, self.depth_test.compare);
+ result = hash_update<64>(result, self.depth_test.write);
+ }
- for(FrameAttachment a: format)
- result = hash_update<64>(result, a);
+ if(self.stencil_test.enabled)
+ {
+ result = hash_round<64>(result, self.stencil_test.compare);
+ result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_pass_op);
+ result = hash_update<64>(result, self.stencil_test.reference);
+ }
+
+ if(self.blend.enabled)
+ {
+ result = hash_round<64>(result, self.blend.equation);
+ result = hash_round<64>(result, self.blend.src_factor);
+ result = hash_round<64>(result, self.blend.dst_factor);
+ result = hash_round<64>(result, self.blend.write_mask);
+ }
+
+ for(FrameAttachment a: format)
+ result = hash_update<64>(result, a);
+ }
return result;
}
/** Fills @p buffer with the Vulkan pipeline creation structures, dispatching
to the compute or graphics variant based on the bound shader program. */
void VulkanPipelineState::fill_creation_info(vector<char> &buffer) const
{
	if(static_cast<const PipelineState *>(this)->shprog->is_compute())
		fill_compute_creation_info(buffer);
	else
		fill_graphics_creation_info(buffer);
}
+
+void VulkanPipelineState::fill_graphics_creation_info(vector<char> &buffer) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
const FrameFormat &format = self.framebuffer->get_format();
- VkRenderPass render_pass = device.get_pipeline_cache().get_render_pass(format, false, false, false);
+ RenderPass render_pass;
+ render_pass.framebuffer = self.framebuffer;
+ render_pass.to_present = self.framebuffer->is_presentable();
+ render_pass.update(device);
unsigned n_color_attachments = 0;
for(FrameAttachment a: format)
}
StructureBuilder sb(buffer, 10);
- VkGraphicsPipelineCreateInfo *&pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
- VkPipelineInputAssemblyStateCreateInfo *&input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
- VkPipelineViewportStateCreateInfo *&viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
- VkPipelineRasterizationStateCreateInfo *&raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
- VkPipelineMultisampleStateCreateInfo *&multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
- VkPipelineDepthStencilStateCreateInfo *&depth_stencil_info = sb.add<VkPipelineDepthStencilStateCreateInfo>();
- VkPipelineColorBlendStateCreateInfo *&blend_info = sb.add<VkPipelineColorBlendStateCreateInfo>();
- VkPipelineColorBlendAttachmentState *&blend_attachments = sb.add<VkPipelineColorBlendAttachmentState>(n_color_attachments);
- VkPipelineDynamicStateCreateInfo *&dynamic_info = sb.add<VkPipelineDynamicStateCreateInfo>();
- VkDynamicState *&dynamic_states = sb.add<VkDynamicState>(2);
+ VkGraphicsPipelineCreateInfo *const &pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
+ VkPipelineInputAssemblyStateCreateInfo *const &input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
+ VkPipelineViewportStateCreateInfo *const &viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
+ VkPipelineRasterizationStateCreateInfo *const &raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
+ VkPipelineMultisampleStateCreateInfo *const &multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
+ VkPipelineDepthStencilStateCreateInfo *const &depth_stencil_info = sb.add<VkPipelineDepthStencilStateCreateInfo>();
+ VkPipelineColorBlendStateCreateInfo *const &blend_info = sb.add<VkPipelineColorBlendStateCreateInfo>();
+ VkPipelineColorBlendAttachmentState *const &blend_attachments = sb.add<VkPipelineColorBlendAttachmentState>(n_color_attachments);
+ VkPipelineDynamicStateCreateInfo *const &dynamic_info = sb.add<VkPipelineDynamicStateCreateInfo>();
+ VkDynamicState *const &dynamic_states = sb.add<VkDynamicState>(2);
input_assembly_info->sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
input_assembly_info->topology = static_cast<VkPrimitiveTopology>(get_vulkan_primitive_type(self.primitive_type));
multisample_info->sampleShadingEnable = VK_FALSE;
multisample_info->minSampleShading = 1.0f;
multisample_info->pSampleMask = 0;
- multisample_info->alphaToCoverageEnable = VK_FALSE;
+ multisample_info->alphaToCoverageEnable = (format.get_samples()>1 && self.blend.alpha_to_coverage ? VK_TRUE : VK_FALSE);
multisample_info->alphaToOneEnable = VK_FALSE;
depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
pipeline_info->pDepthStencilState = depth_stencil_info;
pipeline_info->pColorBlendState = blend_info;
pipeline_info->pDynamicState = dynamic_info;
- pipeline_info->renderPass = handle_cast<::VkRenderPass>(render_pass);
+ pipeline_info->renderPass = handle_cast<::VkRenderPass>(render_pass.handle);
pipeline_info->subpass = 0;
if(self.shprog)
pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
}
+void VulkanPipelineState::fill_compute_creation_info(vector<char> &buffer) const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ StructureBuilder sb(buffer, 1);
+ VkComputePipelineCreateInfo *const &pipeline_info = sb.add<VkComputePipelineCreateInfo>();
+
+ pipeline_info->sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+
+ if(self.shprog)
+ {
+ pipeline_info->stage = *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
+ pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
+ }
+}
+
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
uint64_t result = hash<64>(0, 0);
+ bool empty = true;
- auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
- for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
- if(i->used)
+ auto i = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);
+ for(; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ {
+ if(!i->used)
+ continue;
+
+ result = hash_update<64>(result, i->binding);
+ result = hash_update<64>(result, i->type);
+ if(i->type==PipelineState::UNIFORM_BLOCK)
{
- result = hash_update<64>(result, i->binding);
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
}
-
- auto j = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
- for(; (j!=self.textures.end() && j->binding>>20==index); ++j)
- if(j->used)
+ else if(i->type==PipelineState::SAMPLED_TEXTURE)
{
- result = hash_update<64>(result, j->binding);
- result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->texture->handle));
- result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->sampler->handle));
- result = hash_update<64>(result, j->level);
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->sampler->handle));
+ result = hash_update<64>(result, i->mip_level);
}
+ else if(i->type==PipelineState::STORAGE_TEXTURE)
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
+ empty = false;
+ }
+
+ if(!empty)
+ result = hash_update<64>(result, self.shprog->stage_flags);
return result;
}
+bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ auto i = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);
+ for(; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used && i->type==PipelineState::UNIFORM_BLOCK && i->buffer->get_usage()==STREAMING)
+ return true;
+
+ return false;
+}
+
/** Returns the descriptor set layout handle for set @p index, as created by
the bound shader program's reflection data. */
VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);
	return self.shprog->desc_set_layout_handles[index];
}
-unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, vector<char> &buffer) const
+unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
- auto u_begin = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
- auto t_begin = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
+ auto begin = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);
unsigned n_buffers = 0;
- for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
- if(i->used)
- ++n_buffers;
unsigned n_images = 0;
- for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
+ for(auto i=begin; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
if(i->used)
- ++n_images;
+ {
+ if(i->type==PipelineState::UNIFORM_BLOCK)
+ ++n_buffers;
+ else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
+ ++n_images;
+ }
unsigned n_writes = n_buffers+n_images;
StructureBuilder sb(buffer, 3);
- VkWriteDescriptorSet *&writes = sb.add<VkWriteDescriptorSet>(n_writes);
- VkDescriptorBufferInfo *&buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
- VkDescriptorImageInfo *&images = sb.add<VkDescriptorImageInfo>(n_images);
+ VkWriteDescriptorSet *const &writes = sb.add<VkWriteDescriptorSet>(n_writes);
+ VkDescriptorBufferInfo *const &buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
+ VkDescriptorImageInfo *const &images = sb.add<VkDescriptorImageInfo>(n_images);
VkWriteDescriptorSet *write_ptr = writes;
VkDescriptorBufferInfo *buffer_ptr = buffers;
VkDescriptorImageInfo *image_ptr = images;
- for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
- if(i->used)
+ for(auto i=begin; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ {
+ if(!i->used)
+ continue;
+
+ if(i->type==PipelineState::UNIFORM_BLOCK)
{
buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
buffer_ptr->offset = i->block->get_offset();
+ if(i->buffer->get_usage()==STREAMING)
+ buffer_ptr->offset += frame*i->buffer->get_size();
buffer_ptr->range = i->block->get_data_size();
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
- write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
write_ptr->pBufferInfo = buffer_ptr;
++buffer_ptr;
- ++write_ptr;
}
-
- for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
- if(i->used)
+ else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
{
- image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
- if(i->level<0)
+ if(i->mip_level<0)
image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
else
- image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->level]);
- image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->mip_level]);
+
+ if(i->type==PipelineState::SAMPLED_TEXTURE)
+ {
+ image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
+ image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ }
+ else if(i->type==PipelineState::STORAGE_TEXTURE)
+ {
+ image_ptr->imageLayout = VK_IMAGE_LAYOUT_GENERAL;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ }
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
- write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
write_ptr->pImageInfo = image_ptr;
++image_ptr;
- ++write_ptr;
}
+ ++write_ptr;
+ }
+
return n_writes;
}
-void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
+void VulkanPipelineState::synchronize_resources() const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ for(const PipelineState::BoundResource &r: self.resources)
+ if(r.type==PipelineState::STORAGE_TEXTURE)
+ r.texture->change_layout(-1, VK_IMAGE_LAYOUT_GENERAL, false);
+}
+
+void VulkanPipelineState::apply(const VulkanCommandRecorder &vkCmd, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
- const VulkanFunctions &vk = device.get_functions();
if(!last)
+ {
unapplied = ~0U;
+ first_changed_desc_set = 0;
+ }
else if(last!=this)
{
const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
if(handle!=last->handle)
+ {
unapplied |= PipelineState::SHPROG;
+ if(self.push_const_compat!=last_ps.push_const_compat)
+ {
+ unapplied |= PipelineState::RESOURCES;
+ first_changed_desc_set = 0;
+ }
+ }
if(self.vertex_setup!=last_ps.vertex_setup)
unapplied |= PipelineState::VERTEX_SETUP;
- for(unsigned i=0; (i<descriptor_set_handles.size() && i<last->descriptor_set_handles.size()); ++i)
- if(descriptor_set_handles[i]!=last->descriptor_set_handles[i])
+ for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
+ if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
{
- unapplied |= PipelineState::UNIFORMS;
+ unapplied |= PipelineState::RESOURCES;
+ first_changed_desc_set = min(first_changed_desc_set, i);
break;
}
if(self.viewport!=last_ps.viewport)
unapplied |= PipelineState::SCISSOR;
}
+ VkPipelineBindPoint bind_point = (self.shprog->is_compute() ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS);
if(unapplied&PipelineState::SHPROG)
- vk.CmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, handle);
+ vkCmd.BindPipeline(bind_point, handle);
- if(unapplied&PipelineState::VERTEX_SETUP)
+ if(!self.shprog->is_compute() && (unapplied&PipelineState::VERTEX_SETUP))
if(const VertexSetup *vs = self.vertex_setup)
{
- vk.CmdBindVertexBuffers(command_buffer, 0, vs->n_bindings, vs->buffers, vs->offsets);
+ vkCmd.BindVertexBuffers(0, vs->n_bindings, vs->buffers, vs->offsets);
VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
- vk.CmdBindIndexBuffer(command_buffer, vs->get_index_buffer()->handle, 0, index_type);
+ vkCmd.BindIndexBuffer(vs->get_index_buffer()->handle, 0, index_type);
}
- if(!self.uniform_blocks.empty())
+ if(!self.resources.empty())
{
- const PipelineState::BoundUniformBlock &first_block = self.uniform_blocks.front();
- if(first_block.used && first_block.binding==ReflectData::PUSH_CONSTANT)
+ const PipelineState::BoundResource &first_res = self.resources.front();
+ if(first_res.used && first_res.type==PipelineState::UNIFORM_BLOCK && first_res.binding==ReflectData::PUSH_CONSTANT)
{
- const UniformBlock &pc_block = *first_block.block;
- vk.CmdPushConstants(command_buffer, self.shprog->layout_handle, self.shprog->stage_flags,
+ const UniformBlock &pc_block = *first_res.block;
+ vkCmd.PushConstants(self.shprog->layout_handle, self.shprog->stage_flags,
pc_block.get_offset(), pc_block.get_data_size(), pc_block.get_data_pointer());
}
}
- if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_handles.empty())
+ if((unapplied&PipelineState::RESOURCES) && !descriptor_set_slots.empty())
{
- vector<uint32_t> dynamic_offsets;
- dynamic_offsets.reserve(self.uniform_blocks.size());
- for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
- if(u.used && u.binding>=0)
- {
- if(u.buffer->get_usage()==STREAMING)
- dynamic_offsets.push_back(frame*u.buffer->get_size());
- else
- dynamic_offsets.push_back(0);
- }
-
- vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
- 0, descriptor_set_handles.size(), descriptor_set_handles.data(), dynamic_offsets.size(), dynamic_offsets.data());
+ vector<VkDescriptorSet> descriptor_set_handles;
+ descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
+ for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
+ descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
+ self.descriptor_set_slots[i], self, i, frame));
+
+ vkCmd.BindDescriptorSets(bind_point, self.shprog->layout_handle,
+ first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
}
- if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
+ if(!self.shprog->is_compute() && (unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR)))
{
Rect fb_rect = self.framebuffer->get_rect();
}
viewport.minDepth = 0.0f;
viewport.maxDepth = 1.0f;
- vk.CmdSetViewport(command_buffer, 0, 1, &viewport);
+ vkCmd.SetViewport(0, 1, &viewport);
}
if(unapplied&PipelineState::SCISSOR)
scissor.offset.y = scissor_rect.bottom;
scissor.extent.width = scissor_rect.width;
scissor.extent.height = scissor_rect.height;
- vk.CmdSetScissor(command_buffer, 0, 1, &scissor);
+ vkCmd.SetScissor(0, 1, &scissor);
}
}
unapplied = 0;
+ first_changed_desc_set = descriptor_set_slots.size();
}
} // namespace GL