push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
}
- constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
- PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
+ // State bits that only affect graphics pipelines; they are skipped for
+ // compute programs so irrelevant state changes don't trigger a pipeline lookup.
+ constexpr unsigned graphics_mask = PipelineState::FRAMEBUFFER|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
+ PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE|
+ PipelineState::PATCH_SIZE;
+ // The shader program itself always matters; graphics state is added only
+ // when the bound program is not a compute program.
+ unsigned pipeline_mask = PipelineState::SHPROG;
+ if(!self.shprog->is_compute())
+ pipeline_mask |= graphics_mask;
if(changes&pipeline_mask)
{
handle = device.get_pipeline_cache().get_pipeline(self);
{
if(r.type==PipelineState::UNIFORM_BLOCK)
r.used = self.shprog->uses_uniform_block_binding(r.binding);
- else if(r.type==PipelineState::TEXTURE)
+ // TEXTURE is split into SAMPLED_TEXTURE and STORAGE_TEXTURE; both occupy
+ // a texture binding in the shader program.
+ else if(r.type==PipelineState::SAMPLED_TEXTURE || r.type==PipelineState::STORAGE_TEXTURE)
{
r.used = self.shprog->uses_texture_binding(r.binding);
if(r.mip_level>=0)
r.texture->refresh_mip_views();
+ // Only sampled textures carry a sampler; storage textures are accessed
+ // directly through their image view.
+ if(r.type==PipelineState::SAMPLED_TEXTURE)
+ r.sampler->refresh();
}
if(r.binding>=0)
changed_sets |= 1<<(r.binding>>20);
uint64_t VulkanPipelineState::compute_hash() const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
- const FrameFormat &format = self.framebuffer->get_format();
uint64_t result = hash<64>(self.shprog);
- result = hash_update<64>(result, self.vertex_setup->compute_hash());
- result = hash_round<64>(result, self.primitive_type);
- if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
+ // A compute pipeline is fully identified by its shader program; everything
+ // below is graphics-only state and must not be dereferenced for compute
+ // (framebuffer / vertex_setup may be unset then).
+ if(!self.shprog->is_compute())
{
- result = hash_round<64>(result, self.front_face);
- result = hash_round<64>(result, self.face_cull);
- }
+ const FrameFormat &format = self.framebuffer->get_format();
- result = hash_round<64>(result, format.get_samples());
+ result = hash_update<64>(result, self.vertex_setup->compute_hash());
+ result = hash_round<64>(result, self.primitive_type);
- if(self.depth_test.enabled)
- {
- result = hash_round<64>(result, self.depth_test.compare);
- result = hash_update<64>(result, self.depth_test.write);
- }
+ // Winding/culling only contributes when it actually has an effect.
+ if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
+ {
+ result = hash_round<64>(result, self.front_face);
+ result = hash_round<64>(result, self.face_cull);
+ }
- if(self.stencil_test.enabled)
- {
- result = hash_round<64>(result, self.stencil_test.compare);
- result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
- result = hash_round<64>(result, self.stencil_test.depth_fail_op);
- result = hash_round<64>(result, self.stencil_test.depth_pass_op);
- result = hash_update<64>(result, self.stencil_test.reference);
- }
+ result = hash_round<64>(result, format.get_samples());
+ // alpha_to_coverage is only meaningful on multisampled targets, so it is
+ // hashed only then (mirrors fill_graphics_creation_info).
+ if(format.get_samples()>1)
+ result = hash_round<64>(result, self.blend.alpha_to_coverage);
- if(self.blend.enabled)
- {
- result = hash_round<64>(result, self.blend.equation);
- result = hash_round<64>(result, self.blend.src_factor);
- result = hash_round<64>(result, self.blend.dst_factor);
- result = hash_round<64>(result, self.blend.write_mask);
- }
+ // Depth/stencil/blend sub-state contributes only while enabled.
+ if(self.depth_test.enabled)
+ {
+ result = hash_round<64>(result, self.depth_test.compare);
+ result = hash_update<64>(result, self.depth_test.write);
+ }
- for(FrameAttachment a: format)
- result = hash_update<64>(result, a);
+ if(self.stencil_test.enabled)
+ {
+ result = hash_round<64>(result, self.stencil_test.compare);
+ result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_pass_op);
+ result = hash_update<64>(result, self.stencil_test.reference);
+ }
+
+ if(self.blend.enabled)
+ {
+ result = hash_round<64>(result, self.blend.equation);
+ result = hash_round<64>(result, self.blend.src_factor);
+ result = hash_round<64>(result, self.blend.dst_factor);
+ result = hash_round<64>(result, self.blend.write_mask);
+ }
+
+ // Attachment formats affect render pass compatibility of the pipeline.
+ for(FrameAttachment a: format)
+ result = hash_update<64>(result, a);
+ }
return result;
}
void VulkanPipelineState::fill_creation_info(vector<char> &buffer) const
+{
+ // Dispatch to the graphics or compute builder depending on the kind of
+ // shader program bound to this pipeline state.
+ if(static_cast<const PipelineState *>(this)->shprog->is_compute())
+ fill_compute_creation_info(buffer);
+ else
+ fill_graphics_creation_info(buffer);
+}
+
+// Builds the VkGraphicsPipelineCreateInfo chain into buffer (the old body of
+// fill_creation_info, now graphics-only).
+void VulkanPipelineState::fill_graphics_creation_info(vector<char> &buffer) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
const FrameFormat &format = self.framebuffer->get_format();
RenderPass render_pass;
render_pass.framebuffer = self.framebuffer;
+ render_pass.to_present = self.framebuffer->is_presentable();
render_pass.update(device);
unsigned n_color_attachments = 0;
++n_color_attachments;
}
+ // Tessellation state structures are only populated when the program
+ // actually has tessellation stages.
+ bool has_tessellation = (self.shprog && self.shprog->has_tessellation());
+
+ // Structure count grows from 10 to 12 for the two tessellation structs below.
+ StructureBuilder sb(buffer, 12);
VkGraphicsPipelineCreateInfo *const &pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
VkPipelineInputAssemblyStateCreateInfo *const &input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
+ VkPipelineTessellationStateCreateInfo *const &tessellation_info = sb.add<VkPipelineTessellationStateCreateInfo>(has_tessellation);
+ VkPipelineTessellationDomainOriginStateCreateInfo *const &tess_origin_info = sb.add<VkPipelineTessellationDomainOriginStateCreateInfo>(has_tessellation);
VkPipelineViewportStateCreateInfo *const &viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
VkPipelineRasterizationStateCreateInfo *const &raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
VkPipelineMultisampleStateCreateInfo *const &multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
input_assembly_info->sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
input_assembly_info->topology = static_cast<VkPrimitiveTopology>(get_vulkan_primitive_type(self.primitive_type));
+ // Vulkan disallows primitive restart with patch list topology, so it is
+ // disabled when tessellation is in use.
+ input_assembly_info->primitiveRestartEnable = !has_tessellation;
+
+ if(has_tessellation)
+ {
+ tessellation_info->sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+ tessellation_info->pNext = tess_origin_info;
+ tessellation_info->patchControlPoints = self.patch_size;
+
+ // Lower-left domain origin matches OpenGL's tessellation convention.
+ tess_origin_info->sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+ tess_origin_info->domainOrigin = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT;
+ }
viewport_info->sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
viewport_info->viewportCount = 1;
multisample_info->sampleShadingEnable = VK_FALSE;
multisample_info->minSampleShading = 1.0f;
multisample_info->pSampleMask = 0;
+ // Honor alpha_to_coverage, but only on multisampled targets (kept in sync
+ // with the hashing in compute_hash).
+ multisample_info->alphaToCoverageEnable = (format.get_samples()>1 && self.blend.alpha_to_coverage ? VK_TRUE : VK_FALSE);
multisample_info->alphaToOneEnable = VK_FALSE;
depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
pipeline_info->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
pipeline_info->pInputAssemblyState = input_assembly_info;
+ pipeline_info->pTessellationState = (has_tessellation ? tessellation_info : 0);
pipeline_info->pViewportState = viewport_info;
pipeline_info->pRasterizationState = raster_info;
pipeline_info->pMultisampleState = multisample_info;
pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
}
+// Builds a VkComputePipelineCreateInfo into buffer.  Compute pipelines need
+// only the single shader stage and the pipeline layout.
+void VulkanPipelineState::fill_compute_creation_info(vector<char> &buffer) const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ StructureBuilder sb(buffer, 1);
+ VkComputePipelineCreateInfo *const &pipeline_info = sb.add<VkComputePipelineCreateInfo>();
+
+ pipeline_info->sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
+
+ if(self.shprog)
+ {
+ // The program's creation_info is assumed to begin with its single
+ // VkPipelineShaderStageCreateInfo — TODO confirm against VulkanProgram.
+ pipeline_info->stage = *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
+ pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
+ }
+}
+
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
}
+ // Sampled textures hash the view, sampler and mip selection.
+ else if(i->type==PipelineState::SAMPLED_TEXTURE)
{
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->sampler->handle));
result = hash_update<64>(result, i->mip_level);
}
+ // Storage textures have no sampler; only the image identity matters.
+ else if(i->type==PipelineState::STORAGE_TEXTURE)
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
empty = false;
}
{
if(i->type==PipelineState::UNIFORM_BLOCK)
++n_buffers;
+ // Both sampled and storage textures consume a VkDescriptorImageInfo slot.
+ else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
++n_images;
}
unsigned n_writes = n_buffers+n_images;
++buffer_ptr;
}
+ else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
{
if(i->mip_level<0)
image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
else
image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->mip_level]);
+
+ // Sampler, layout and descriptor type differ between the two texture kinds:
+ // combined image samplers are read in SHADER_READ_ONLY_OPTIMAL, while
+ // storage images must be in GENERAL layout to allow shader writes.
+ if(i->type==PipelineState::SAMPLED_TEXTURE)
+ {
+ image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
+ image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ }
+ else if(i->type==PipelineState::STORAGE_TEXTURE)
+ {
+ image_ptr->imageLayout = VK_IMAGE_LAYOUT_GENERAL;
+ write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ }
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
write_ptr->pImageInfo = image_ptr;
++image_ptr;
++write_ptr;
}
+// Debug-only sanity check on the bound resources before the writes are used.
+#ifdef DEBUG
+ self.check_bound_resources();
+#endif
+
return n_writes;
}
+// Transitions all bound storage textures into VK_IMAGE_LAYOUT_GENERAL so
+// shaders may write to them.  The -1 level presumably means "all mip levels"
+// — TODO confirm against VulkanTexture::change_layout.
+void VulkanPipelineState::synchronize_resources() const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ for(const PipelineState::BoundResource &r: self.resources)
+ if(r.type==PipelineState::STORAGE_TEXTURE)
+ r.texture->change_layout(-1, VK_IMAGE_LAYOUT_GENERAL, false);
+}
+
void VulkanPipelineState::apply(const VulkanCommandRecorder &vkCmd, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
unapplied |= PipelineState::SCISSOR;
}
+ // Compute programs bind at the compute bind point and skip graphics-only
+ // state (vertex buffers, viewport, scissor) below.
+ VkPipelineBindPoint bind_point = (self.shprog->is_compute() ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS);
if(unapplied&PipelineState::SHPROG)
+ vkCmd.BindPipeline(bind_point, handle);
+ if(!self.shprog->is_compute() && (unapplied&PipelineState::VERTEX_SETUP))
if(const VertexSetup *vs = self.vertex_setup)
{
vkCmd.BindVertexBuffers(0, vs->n_bindings, vs->buffers, vs->offsets);
descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
self.descriptor_set_slots[i], self, i, frame));
+ // Descriptor sets are bound at the same bind point as the pipeline.
+ vkCmd.BindDescriptorSets(bind_point, self.shprog->layout_handle,
first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
}
+ if(!self.shprog->is_compute() && (unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR)))
{
Rect fb_rect = self.framebuffer->get_rect();