#include <msp/core/algorithm.h>
#include <msp/core/hash.h>
#include "batch.h"
#include "blend.h"
#include "buffer.h"
#include "depthtest.h"
#include "device.h"
#include "framebuffer.h"
#include "pipelinestate.h"
#include "pipelinestate_backend.h"
#include "program.h"
#include "rect.h"
#include "renderpass.h"
#include "sampler.h"
#include "stenciltest.h"
#include "structurebuilder.h"
#include "texture.h"
#include "uniformblock.h"
#include "vertexsetup.h"
#include "vulkan.h"

using namespace std;

namespace Msp {
namespace GL {

VulkanPipelineState::VulkanPipelineState():
	device(Device::get_current())
{ }

VulkanPipelineState::VulkanPipelineState(VulkanPipelineState &&other):
	device(other.device),
	handle(other.handle)
{ }

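/* Processes the change flags accumulated by the frontend PipelineState:
refreshes backend objects that will be needed, looks up a new pipeline
handle from the cache when pipeline-affecting state has changed, and
reserves descriptor set slots for changed resource bindings.  The work
still to be recorded is collected in the unapplied mask, which apply()
consumes. */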
void VulkanPipelineState::update() const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	unapplied |= changes&(PipelineState::VIEWPORT|PipelineState::SCISSOR|PipelineState::VERTEX_SETUP);

	if(changes&PipelineState::VERTEX_SETUP)
		self.vertex_setup->refresh();

	if(changes&PipelineState::SHPROG)
	{
		push_const_compat = hash<32>(self.shprog->stage_flags);
		push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
	}

	constexpr unsigned graphics_mask = PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
		PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
	unsigned pipeline_mask = PipelineState::SHPROG;
	if(!self.shprog->is_compute())
		pipeline_mask |= graphics_mask;
	if(changes&pipeline_mask)
	{
		handle = device.get_pipeline_cache().get_pipeline(self);
		unapplied |= PipelineState::SHPROG;
	}

	if(changes&(PipelineState::SHPROG|PipelineState::RESOURCES))
	{
		unsigned changed_sets = (changes&PipelineState::SHPROG ? ~0U : 0U);
		for(const PipelineState::BoundResource &r: self.resources)
			if(r.changed || changed_sets==~0U)
			{
				if(r.type==PipelineState::UNIFORM_BLOCK)
					r.used = self.shprog->uses_uniform_block_binding(r.binding);
				else if(r.type==PipelineState::SAMPLED_TEXTURE || r.type==PipelineState::STORAGE_TEXTURE)
				{
					r.used = self.shprog->uses_texture_binding(r.binding);
					if(r.mip_level>=0)
						r.texture->refresh_mip_views();
					if(r.type==PipelineState::SAMPLED_TEXTURE)
						r.sampler->refresh();
				}
				if(r.binding>=0)
					changed_sets |= 1<<(r.binding>>20);
				r.changed = false;
			}

		if(changed_sets)
		{
			descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
			first_changed_desc_set = descriptor_set_slots.size();
			for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
				if(changed_sets&(1<<i))
				{
					descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
					first_changed_desc_set = min(first_changed_desc_set, i);
				}

			unapplied |= PipelineState::RESOURCES;
		}
	}

	changes = 0;
}

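/* Computes a hash of the state that affects pipeline object creation, used
as the key for pipeline cache lookups.  Rasterization, depth/stencil, blend
and framebuffer format only contribute for graphics pipelines; a compute
pipeline is identified by its shader program alone. */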
uint64_t VulkanPipelineState::compute_hash() const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	uint64_t result = hash<64>(self.shprog);

	if(!self.shprog->is_compute())
	{
		const FrameFormat &format = self.framebuffer->get_format();

		result = hash_update<64>(result, self.vertex_setup->compute_hash());
		result = hash_round<64>(result, self.primitive_type);

		if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
		{
			result = hash_round<64>(result, self.front_face);
			result = hash_round<64>(result, self.face_cull);
		}

		result = hash_round<64>(result, format.get_samples());

		if(self.depth_test.enabled)
		{
			result = hash_round<64>(result, self.depth_test.compare);
			result = hash_update<64>(result, self.depth_test.write);
		}

		if(self.stencil_test.enabled)
		{
			result = hash_round<64>(result, self.stencil_test.compare);
			result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
			result = hash_round<64>(result, self.stencil_test.depth_fail_op);
			result = hash_round<64>(result, self.stencil_test.depth_pass_op);
			result = hash_update<64>(result, self.stencil_test.reference);
		}

		if(self.blend.enabled)
		{
			result = hash_round<64>(result, self.blend.equation);
			result = hash_round<64>(result, self.blend.src_factor);
			result = hash_round<64>(result, self.blend.dst_factor);
			result = hash_round<64>(result, self.blend.write_mask);
		}

		for(FrameAttachment a: format)
			result = hash_update<64>(result, a);
	}

	return result;
}

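/* Fills in the Vulkan pipeline creation structures for this state,
dispatching to the compute or graphics variant depending on the shader
program. */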
void VulkanPipelineState::fill_creation_info(vector<char> &buffer) const
{
	if(static_cast<const PipelineState *>(this)->shprog->is_compute())
		fill_compute_creation_info(buffer);
	else
		fill_graphics_creation_info(buffer);
}

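/* Builds a VkGraphicsPipelineCreateInfo and its substructures in the
caller-provided buffer through StructureBuilder.  Viewport and scissor are
declared as dynamic state so they can be changed without recreating the
pipeline, and a temporary RenderPass object supplies a compatible render
pass handle. */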
void VulkanPipelineState::fill_graphics_creation_info(vector<char> &buffer) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	const FrameFormat &format = self.framebuffer->get_format();
	RenderPass render_pass;
	render_pass.framebuffer = self.framebuffer;
	render_pass.update(device);

	unsigned n_color_attachments = 0;
	for(FrameAttachment a: format)
	{
		unsigned attach_pt = get_attach_point(a);
		if(attach_pt!=get_attach_point(DEPTH_ATTACHMENT) && attach_pt!=get_attach_point(STENCIL_ATTACHMENT))
			++n_color_attachments;
	}

	StructureBuilder sb(buffer, 10);
	VkGraphicsPipelineCreateInfo *const &pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
	VkPipelineInputAssemblyStateCreateInfo *const &input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
	VkPipelineViewportStateCreateInfo *const &viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
	VkPipelineRasterizationStateCreateInfo *const &raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
	VkPipelineMultisampleStateCreateInfo *const &multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
	VkPipelineDepthStencilStateCreateInfo *const &depth_stencil_info = sb.add<VkPipelineDepthStencilStateCreateInfo>();
	VkPipelineColorBlendStateCreateInfo *const &blend_info = sb.add<VkPipelineColorBlendStateCreateInfo>();
	VkPipelineColorBlendAttachmentState *const &blend_attachments = sb.add<VkPipelineColorBlendAttachmentState>(n_color_attachments);
	VkPipelineDynamicStateCreateInfo *const &dynamic_info = sb.add<VkPipelineDynamicStateCreateInfo>();
	VkDynamicState *const &dynamic_states = sb.add<VkDynamicState>(2);

	input_assembly_info->sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	input_assembly_info->topology = static_cast<VkPrimitiveTopology>(get_vulkan_primitive_type(self.primitive_type));
	input_assembly_info->primitiveRestartEnable = true;

	viewport_info->sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewport_info->viewportCount = 1;
	viewport_info->pViewports = 0;
	viewport_info->scissorCount = 1;
	viewport_info->pScissors = 0;

	raster_info->sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	raster_info->depthClampEnable = VK_FALSE;
	raster_info->rasterizerDiscardEnable = VK_FALSE;
	raster_info->polygonMode = VK_POLYGON_MODE_FILL;
	raster_info->frontFace = (self.front_face==CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE);
	if(self.face_cull==NO_CULL || self.front_face==NON_MANIFOLD)
		raster_info->cullMode = VK_CULL_MODE_NONE;
	else
		raster_info->cullMode = (self.face_cull==CULL_FRONT ? VK_CULL_MODE_FRONT_BIT : VK_CULL_MODE_BACK_BIT);
	raster_info->depthBiasEnable = VK_FALSE;
	raster_info->depthBiasConstantFactor = 0.0f;
	raster_info->depthBiasClamp = 0.0f;
	raster_info->depthBiasSlopeFactor = 0.0f;
	raster_info->lineWidth = 1.0f;

	multisample_info->sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisample_info->rasterizationSamples = static_cast<VkSampleCountFlagBits>(get_vulkan_samples(format.get_samples()));
	multisample_info->sampleShadingEnable = VK_FALSE;
	multisample_info->minSampleShading = 1.0f;
	multisample_info->pSampleMask = 0;
	multisample_info->alphaToCoverageEnable = VK_FALSE;
	multisample_info->alphaToOneEnable = VK_FALSE;

	depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depth_stencil_info->depthTestEnable = self.depth_test.enabled;
	depth_stencil_info->depthWriteEnable = self.depth_test.write;
	depth_stencil_info->depthCompareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.depth_test.compare));
	depth_stencil_info->depthBoundsTestEnable = VK_FALSE;

	depth_stencil_info->stencilTestEnable = self.stencil_test.enabled;
	depth_stencil_info->front.failOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.stencil_fail_op));
	depth_stencil_info->front.passOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_pass_op));
	depth_stencil_info->front.depthFailOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_fail_op));
	depth_stencil_info->front.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.stencil_test.compare));
	depth_stencil_info->front.compareMask = 0xFFFFFFFFU;
	depth_stencil_info->front.writeMask = 0xFFFFFFFFU;
	depth_stencil_info->front.reference = self.stencil_test.reference;
	depth_stencil_info->back = depth_stencil_info->front;

	for(unsigned i=0; i<n_color_attachments; ++i)
	{
		blend_attachments[i].blendEnable = self.blend.enabled;
		blend_attachments[i].srcColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.src_factor));
		blend_attachments[i].dstColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.dst_factor));
		blend_attachments[i].colorBlendOp = static_cast<VkBlendOp>(get_vulkan_blend_equation(self.blend.equation));
		blend_attachments[i].srcAlphaBlendFactor = blend_attachments[i].srcColorBlendFactor;
		blend_attachments[i].dstAlphaBlendFactor = blend_attachments[i].dstColorBlendFactor;
		blend_attachments[i].alphaBlendOp = blend_attachments[i].colorBlendOp;
		blend_attachments[i].colorWriteMask = get_vulkan_color_mask(self.blend.write_mask);
	}

	blend_info->sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	blend_info->attachmentCount = n_color_attachments;
	blend_info->pAttachments = blend_attachments;

	dynamic_states[0] = VK_DYNAMIC_STATE_VIEWPORT;
	dynamic_states[1] = VK_DYNAMIC_STATE_SCISSOR;

	dynamic_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamic_info->dynamicStateCount = 2;
	dynamic_info->pDynamicStates = dynamic_states;

	pipeline_info->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;

	pipeline_info->pInputAssemblyState = input_assembly_info;
	pipeline_info->pTessellationState = 0;
	pipeline_info->pViewportState = viewport_info;
	pipeline_info->pRasterizationState = raster_info;
	pipeline_info->pMultisampleState = multisample_info;
	pipeline_info->pDepthStencilState = depth_stencil_info;
	pipeline_info->pColorBlendState = blend_info;
	pipeline_info->pDynamicState = dynamic_info;
	pipeline_info->renderPass = handle_cast<::VkRenderPass>(render_pass.handle);
	pipeline_info->subpass = 0;

	if(self.shprog)
	{
		pipeline_info->stageCount = self.shprog->n_stages;
		pipeline_info->pStages = reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
		pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
	}

	if(self.vertex_setup)
		pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
}

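/* Builds a VkComputePipelineCreateInfo.  A compute pipeline only needs the
single compute shader stage and the pipeline layout. */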
void VulkanPipelineState::fill_compute_creation_info(vector<char> &buffer) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	StructureBuilder sb(buffer, 1);
	VkComputePipelineCreateInfo *const &pipeline_info = sb.add<VkComputePipelineCreateInfo>();

	pipeline_info->sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;

	if(self.shprog)
	{
		pipeline_info->stage = *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
		pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
	}
}

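/* Hashes the resources bound in one descriptor set, so identical sets can be
shared between pipeline states.  Resource bindings are kept sorted, with the
descriptor set index in the bits above bit 20 and the binding number in the
low 20 bits, so the resources of a single set form a contiguous range. */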
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	uint64_t result = hash<64>(0, 0);
	bool empty = true;

	auto i = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);
	for(; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
	{
		if(!i->used)
			continue;

		result = hash_update<64>(result, i->binding);
		result = hash_update<64>(result, i->type);
		if(i->type==PipelineState::UNIFORM_BLOCK)
		{
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
		}
		else if(i->type==PipelineState::SAMPLED_TEXTURE)
		{
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->sampler->handle));
			result = hash_update<64>(result, i->mip_level);
		}
		else if(i->type==PipelineState::STORAGE_TEXTURE)
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->texture->handle));
		empty = false;
	}

	if(!empty)
		result = hash_update<64>(result, self.shprog->stage_flags);

	return result;
}

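/* A descriptor set is treated as dynamic if any of its uniform blocks lives
in a streaming buffer, since the buffer offset then changes every frame. */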
bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	auto i = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);
	for(; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
		if(i->used && i->type==PipelineState::UNIFORM_BLOCK && i->buffer->get_usage()==STREAMING)
			return true;

	return false;
}

VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
{
	return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
}

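/* Fills in VkWriteDescriptorSet structures for all used resources of one
descriptor set.  The set's bindings are traversed twice: first to count
buffers and images so the arrays can be sized, then to fill in the write
structures.  Returns the number of writes produced. */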
unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	auto begin = lower_bound_member(self.resources, static_cast<int>(index)<<20, &PipelineState::BoundResource::binding);

	unsigned n_buffers = 0;
	unsigned n_images = 0;
	for(auto i=begin; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
		if(i->used)
		{
			if(i->type==PipelineState::UNIFORM_BLOCK)
				++n_buffers;
			else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
				++n_images;
		}
	unsigned n_writes = n_buffers+n_images;

	StructureBuilder sb(buffer, 3);
	VkWriteDescriptorSet *const &writes = sb.add<VkWriteDescriptorSet>(n_writes);
	VkDescriptorBufferInfo *const &buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
	VkDescriptorImageInfo *const &images = sb.add<VkDescriptorImageInfo>(n_images);

	VkWriteDescriptorSet *write_ptr = writes;
	VkDescriptorBufferInfo *buffer_ptr = buffers;
	VkDescriptorImageInfo *image_ptr = images;

	for(auto i=begin; (i!=self.resources.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
	{
		if(!i->used)
			continue;

		if(i->type==PipelineState::UNIFORM_BLOCK)
		{
			buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
			buffer_ptr->offset = i->block->get_offset();
			if(i->buffer->get_usage()==STREAMING)
				buffer_ptr->offset += frame*i->buffer->get_size();
			buffer_ptr->range = i->block->get_data_size();

			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			write_ptr->dstBinding = i->binding&0xFFFFF;
			write_ptr->descriptorCount = 1;
			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			write_ptr->pBufferInfo = buffer_ptr;

			++buffer_ptr;
		}
		else if(i->type==PipelineState::SAMPLED_TEXTURE || i->type==PipelineState::STORAGE_TEXTURE)
		{
			if(i->mip_level<0)
				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
			else
				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->mip_level]);

			if(i->type==PipelineState::SAMPLED_TEXTURE)
			{
				image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
				image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
				write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			}
			else if(i->type==PipelineState::STORAGE_TEXTURE)
			{
				image_ptr->imageLayout = VK_IMAGE_LAYOUT_GENERAL;
				write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
			}

			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			write_ptr->dstBinding = i->binding&0xFFFFF;
			write_ptr->descriptorCount = 1;
			write_ptr->pImageInfo = image_ptr;

			++image_ptr;
		}

		++write_ptr;
	}

	return n_writes;
}

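/* Prepares bound resources for use by the pipeline; here only storage
textures require a transition, to VK_IMAGE_LAYOUT_GENERAL, so shaders can
write to them. */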
void VulkanPipelineState::synchronize_resources(bool discard_fb_contents) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	for(const PipelineState::BoundResource &r: self.resources)
		if(r.type==PipelineState::STORAGE_TEXTURE)
			r.texture->change_layout(-1, VK_IMAGE_LAYOUT_GENERAL, false);
}

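/* Records the commands that make this pipeline state current.  When a
previous state is given, only the differences to it are applied; without one
everything is considered unapplied.  Viewport and scissor are emitted as
dynamic state, flipped vertically when negative_viewport is requested. */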
void VulkanPipelineState::apply(const VulkanCommandRecorder &vkCmd, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	if(!last)
	{
		unapplied = ~0U;
		first_changed_desc_set = 0;
	}
	else if(last!=this)
	{
		const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
		if(handle!=last->handle)
		{
			unapplied |= PipelineState::SHPROG;
			if(self.push_const_compat!=last_ps.push_const_compat)
			{
				unapplied |= PipelineState::RESOURCES;
				first_changed_desc_set = 0;
			}
		}
		if(self.vertex_setup!=last_ps.vertex_setup)
			unapplied |= PipelineState::VERTEX_SETUP;
		for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
			if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
			{
				unapplied |= PipelineState::RESOURCES;
				first_changed_desc_set = min(first_changed_desc_set, i);
				break;
			}
		if(self.viewport!=last_ps.viewport)
			unapplied |= PipelineState::VIEWPORT;
		if(self.scissor!=last_ps.scissor)
			unapplied |= PipelineState::SCISSOR;
	}

	VkPipelineBindPoint bind_point = (self.shprog->is_compute() ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS);
	if(unapplied&PipelineState::SHPROG)
		vkCmd.BindPipeline(bind_point, handle);

	if(!self.shprog->is_compute() && (unapplied&PipelineState::VERTEX_SETUP))
		if(const VertexSetup *vs = self.vertex_setup)
		{
			vkCmd.BindVertexBuffers(0, vs->n_bindings, vs->buffers, vs->offsets);
			VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
			vkCmd.BindIndexBuffer(vs->get_index_buffer()->handle, 0, index_type);
		}

	if(!self.resources.empty())
	{
		const PipelineState::BoundResource &first_res = self.resources.front();
		if(first_res.used && first_res.type==PipelineState::UNIFORM_BLOCK && first_res.binding==ReflectData::PUSH_CONSTANT)
		{
			const UniformBlock &pc_block = *first_res.block;
			vkCmd.PushConstants(self.shprog->layout_handle, self.shprog->stage_flags,
				pc_block.get_offset(), pc_block.get_data_size(), pc_block.get_data_pointer());
		}
	}

	if((unapplied&PipelineState::RESOURCES) && !descriptor_set_slots.empty())
	{
		vector<VkDescriptorSet> descriptor_set_handles;
		descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
		for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
			descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
				self.descriptor_set_slots[i], self, i, frame));

		vkCmd.BindDescriptorSets(bind_point, self.shprog->layout_handle,
			first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
	}

	if(!self.shprog->is_compute() && (unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR)))
	{
		Rect fb_rect = self.framebuffer->get_rect();

		if(unapplied&PipelineState::VIEWPORT)
		{
			Rect viewport_rect = fb_rect.intersect(self.viewport);
			VkViewport viewport = { };
			viewport.x = viewport_rect.left;
			viewport.y = viewport_rect.bottom;
			viewport.width = viewport_rect.width;
			viewport.height = viewport_rect.height;
			if(negative_viewport)
			{
				viewport.y += viewport.height;
				viewport.height = -viewport.height;
			}
			viewport.minDepth = 0.0f;
			viewport.maxDepth = 1.0f;
			vkCmd.SetViewport(0, 1, &viewport);
		}

		if(unapplied&PipelineState::SCISSOR)
		{
			Rect scissor_rect = fb_rect.intersect(self.scissor);
			VkRect2D scissor = { };
			scissor.offset.x = scissor_rect.left;
			scissor.offset.y = scissor_rect.bottom;
			scissor.extent.width = scissor_rect.width;
			scissor.extent.height = scissor_rect.height;
			vkCmd.SetScissor(0, 1, &scissor);
		}
	}

	unapplied = 0;
	first_changed_desc_set = descriptor_set_slots.size();
}

} // namespace GL
} // namespace Msp