libs/gl.git: source/backends/vulkan/pipelinestate_backend.cpp
Commit: Rebind all descriptor sets if push constant layout changes
#include <msp/core/algorithm.h>
#include <msp/core/hash.h>
#include "batch.h"
#include "blend.h"
#include "buffer.h"
#include "depthtest.h"
#include "device.h"
#include "framebuffer.h"
#include "pipelinestate.h"
#include "pipelinestate_backend.h"
#include "program.h"
#include "rect.h"
#include "sampler.h"
#include "stenciltest.h"
#include "structurebuilder.h"
#include "texture.h"
#include "uniformblock.h"
#include "vertexsetup.h"
#include "vulkan.h"

using namespace std;

namespace Msp {
namespace GL {

VulkanPipelineState::VulkanPipelineState():
        device(Device::get_current())
{ }

VulkanPipelineState::VulkanPipelineState(VulkanPipelineState &&other):
        device(other.device),
        handle(other.handle)
{ }

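/* Processes the accumulated change flags: refreshes dependent objects, fetches
a pipeline handle from the device's pipeline cache and obtains new slots from
the descriptor pool for any descriptor sets whose contents changed.  Work that
still has to be recorded into a command buffer is collected in the unapplied
mask for apply(). */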
void VulkanPipelineState::update() const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);

        unapplied |= changes&(PipelineState::VIEWPORT|PipelineState::SCISSOR|PipelineState::VERTEX_SETUP);

        if(changes&PipelineState::VERTEX_SETUP)
                self.vertex_setup->refresh();

        if(changes&PipelineState::SHPROG)
        {
                push_const_compat = hash<32>(self.shprog->stage_flags);
                push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
        }

        constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
                PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
        if(changes&pipeline_mask)
        {
                handle = device.get_pipeline_cache().get_pipeline(self);
                unapplied |= PipelineState::SHPROG;
        }

        if(changes&(PipelineState::SHPROG|PipelineState::UNIFORMS|PipelineState::TEXTURES))
        {
                unsigned changed_sets = (changes&PipelineState::SHPROG ? ~0U : 0U);
                for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
                        if(u.changed || changed_sets==~0U)
                        {
                                if(u.block)
                                        u.used = self.shprog->uses_uniform_block_binding(u.binding);
                                if(u.binding>=0)
                                        changed_sets |= 1<<(u.binding>>20);
                                u.changed = false;
                        }
                for(const PipelineState::BoundTexture &t: self.textures)
                        if(t.changed || changed_sets==~0U)
                        {
                                if(t.texture && t.sampler)
                                        t.used = self.shprog->uses_texture_binding(t.binding);
                                changed_sets |= 1<<(t.binding>>20);
                                if(t.texture && t.level>=0)
                                        t.texture->refresh_mip_views();
                                if(t.sampler)
                                        t.sampler->refresh();
                                t.changed = false;
                        }

                if(changed_sets)
                {
                        descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
                        first_changed_desc_set = descriptor_set_slots.size();
                        for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
                                if(changed_sets&(1<<i))
                                {
                                        descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
                                        first_changed_desc_set = min(first_changed_desc_set, i);
                                }

                        unapplied |= PipelineState::UNIFORMS;
                }
        }

        changes = 0;
}

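/* Hashes the state which is baked into the Vulkan pipeline object: shader
program, vertex format, primitive type, rasterization, depth/stencil, blend
and framebuffer format. */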
uint64_t VulkanPipelineState::compute_hash() const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);
        const FrameFormat &format = self.framebuffer->get_format();

        uint64_t result = hash<64>(self.shprog);
        result = hash_update<64>(result, self.vertex_setup->compute_hash());
        result = hash_round<64>(result, self.primitive_type);

        if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
        {
                result = hash_round<64>(result, self.front_face);
                result = hash_round<64>(result, self.face_cull);
        }

        result = hash_round<64>(result, format.get_samples());

        if(self.depth_test.enabled)
        {
                result = hash_round<64>(result, self.depth_test.compare);
                result = hash_update<64>(result, self.depth_test.write);
        }

        if(self.stencil_test.enabled)
        {
                result = hash_round<64>(result, self.stencil_test.compare);
                result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
                result = hash_round<64>(result, self.stencil_test.depth_fail_op);
                result = hash_round<64>(result, self.stencil_test.depth_pass_op);
                result = hash_update<64>(result, self.stencil_test.reference);
        }

        if(self.blend.enabled)
        {
                result = hash_round<64>(result, self.blend.equation);
                result = hash_round<64>(result, self.blend.src_factor);
                result = hash_round<64>(result, self.blend.dst_factor);
                result = hash_round<64>(result, self.blend.write_mask);
        }

        for(FrameAttachment a: format)
                result = hash_update<64>(result, a);

        return result;
}

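/* Builds a VkGraphicsPipelineCreateInfo and its substructures into the
caller's buffer.  Viewport and scissor are declared as dynamic state so they
do not need to be baked into the pipeline object. */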
void VulkanPipelineState::fill_creation_info(vector<char> &buffer) const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);

        const FrameFormat &format = self.framebuffer->get_format();
        VkRenderPass render_pass = device.get_pipeline_cache().get_render_pass(format, false, false, false);

        unsigned n_color_attachments = 0;
        for(FrameAttachment a: format)
        {
                unsigned attach_pt = get_attach_point(a);
                if(attach_pt!=get_attach_point(DEPTH_ATTACHMENT) && attach_pt!=get_attach_point(STENCIL_ATTACHMENT))
                        ++n_color_attachments;
        }

        StructureBuilder sb(buffer, 10);
        VkGraphicsPipelineCreateInfo *&pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
        VkPipelineInputAssemblyStateCreateInfo *&input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
        VkPipelineViewportStateCreateInfo *&viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
        VkPipelineRasterizationStateCreateInfo *&raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
        VkPipelineMultisampleStateCreateInfo *&multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
        VkPipelineDepthStencilStateCreateInfo *&depth_stencil_info = sb.add<VkPipelineDepthStencilStateCreateInfo>();
        VkPipelineColorBlendStateCreateInfo *&blend_info = sb.add<VkPipelineColorBlendStateCreateInfo>();
        VkPipelineColorBlendAttachmentState *&blend_attachments = sb.add<VkPipelineColorBlendAttachmentState>(n_color_attachments);
        VkPipelineDynamicStateCreateInfo *&dynamic_info = sb.add<VkPipelineDynamicStateCreateInfo>();
        VkDynamicState *&dynamic_states = sb.add<VkDynamicState>(2);

        input_assembly_info->sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
        input_assembly_info->topology = static_cast<VkPrimitiveTopology>(get_vulkan_primitive_type(self.primitive_type));
        input_assembly_info->primitiveRestartEnable = true;

        viewport_info->sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
        viewport_info->viewportCount = 1;
        viewport_info->pViewports = 0;
        viewport_info->scissorCount = 1;
        viewport_info->pScissors = 0;

        raster_info->sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
        raster_info->depthClampEnable = VK_FALSE;
        raster_info->rasterizerDiscardEnable = VK_FALSE;
        raster_info->polygonMode = VK_POLYGON_MODE_FILL;
        raster_info->frontFace = (self.front_face==CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE);
        if(self.face_cull==NO_CULL || self.front_face==NON_MANIFOLD)
                raster_info->cullMode = VK_CULL_MODE_NONE;
        else
                raster_info->cullMode = (self.face_cull==CULL_FRONT ? VK_CULL_MODE_FRONT_BIT : VK_CULL_MODE_BACK_BIT);
        raster_info->depthBiasEnable = VK_FALSE;
        raster_info->depthBiasConstantFactor = 0.0f;
        raster_info->depthBiasClamp = 0.0f;
        raster_info->depthBiasSlopeFactor = 0.0f;
        raster_info->lineWidth = 1.0f;

        multisample_info->sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
        multisample_info->rasterizationSamples = static_cast<VkSampleCountFlagBits>(get_vulkan_samples(format.get_samples()));
        multisample_info->sampleShadingEnable = VK_FALSE;
        multisample_info->minSampleShading = 1.0f;
        multisample_info->pSampleMask = 0;
        multisample_info->alphaToCoverageEnable = VK_FALSE;
        multisample_info->alphaToOneEnable = VK_FALSE;

        depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
        depth_stencil_info->depthTestEnable = self.depth_test.enabled;
        depth_stencil_info->depthWriteEnable = self.depth_test.write;
        depth_stencil_info->depthCompareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.depth_test.compare));
        depth_stencil_info->depthBoundsTestEnable = VK_FALSE;

        depth_stencil_info->stencilTestEnable = self.stencil_test.enabled;
        depth_stencil_info->front.failOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.stencil_fail_op));
        depth_stencil_info->front.passOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_pass_op));
        depth_stencil_info->front.depthFailOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_fail_op));
        depth_stencil_info->front.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.stencil_test.compare));
        depth_stencil_info->front.compareMask = 0xFFFFFFFFU;
        depth_stencil_info->front.writeMask = 0xFFFFFFFFU;
        depth_stencil_info->front.reference = self.stencil_test.reference;
        depth_stencil_info->back = depth_stencil_info->front;

        for(unsigned i=0; i<n_color_attachments; ++i)
        {
                blend_attachments[i].blendEnable = self.blend.enabled;
                blend_attachments[i].srcColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.src_factor));
                blend_attachments[i].dstColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.dst_factor));
                blend_attachments[i].colorBlendOp = static_cast<VkBlendOp>(get_vulkan_blend_equation(self.blend.equation));
                blend_attachments[i].srcAlphaBlendFactor = blend_attachments[i].srcColorBlendFactor;
                blend_attachments[i].dstAlphaBlendFactor = blend_attachments[i].dstColorBlendFactor;
                blend_attachments[i].alphaBlendOp = blend_attachments[i].colorBlendOp;
                blend_attachments[i].colorWriteMask = get_vulkan_color_mask(self.blend.write_mask);
        }

        blend_info->sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
        blend_info->attachmentCount = n_color_attachments;
        blend_info->pAttachments = blend_attachments;

        dynamic_states[0] = VK_DYNAMIC_STATE_VIEWPORT;
        dynamic_states[1] = VK_DYNAMIC_STATE_SCISSOR;

        dynamic_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
        dynamic_info->dynamicStateCount = 2;
        dynamic_info->pDynamicStates = dynamic_states;

        pipeline_info->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;

        pipeline_info->pInputAssemblyState = input_assembly_info;
        pipeline_info->pTessellationState = 0;
        pipeline_info->pViewportState = viewport_info;
        pipeline_info->pRasterizationState = raster_info;
        pipeline_info->pMultisampleState = multisample_info;
        pipeline_info->pDepthStencilState = depth_stencil_info;
        pipeline_info->pColorBlendState = blend_info;
        pipeline_info->pDynamicState = dynamic_info;
        pipeline_info->renderPass = handle_cast<::VkRenderPass>(render_pass);
        pipeline_info->subpass = 0;

        if(self.shprog)
        {
                pipeline_info->stageCount = self.shprog->n_stages;
                pipeline_info->pStages = reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
                pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
        }

        if(self.vertex_setup)
                pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
}

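/* Hashes the resources bound to a single descriptor set.  The set index is
stored in the upper bits of each binding number (binding>>20); the low 20 bits
identify the binding within the set. */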
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);

        uint64_t result = hash<64>(0, 0);
        bool empty = true;

        auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
        for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
                if(i->used)
                {
                        result = hash_update<64>(result, i->binding);
                        result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
                        result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
                        empty = false;
                }

        auto j = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
        for(; (j!=self.textures.end() && j->binding>>20==index); ++j)
                if(j->used)
                {
                        result = hash_update<64>(result, j->binding);
                        result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->texture->handle));
                        result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->sampler->handle));
                        result = hash_update<64>(result, j->level);
                        empty = false;
                }

        if(!empty)
                result = hash_update<64>(result, self.shprog->stage_flags);

        return result;
}

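/* A descriptor set is considered dynamic if any uniform block in it is backed
by a streaming buffer, whose offset changes from frame to frame. */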
bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);

        auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
        for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
                if(i->used && i->buffer && i->buffer->get_usage()==STREAMING)
                        return true;

        return false;
}

VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
{
        return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
}

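/* Fills in VkWriteDescriptorSet structures for all used uniform blocks and
textures of one descriptor set and returns the number of writes produced. */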
unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);

        auto u_begin = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
        auto t_begin = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);

        unsigned n_buffers = 0;
        for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
                if(i->used)
                        ++n_buffers;
        unsigned n_images = 0;
        for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
                if(i->used)
                        ++n_images;
        unsigned n_writes = n_buffers+n_images;

        StructureBuilder sb(buffer, 3);
        VkWriteDescriptorSet *&writes = sb.add<VkWriteDescriptorSet>(n_writes);
        VkDescriptorBufferInfo *&buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
        VkDescriptorImageInfo *&images = sb.add<VkDescriptorImageInfo>(n_images);

        VkWriteDescriptorSet *write_ptr = writes;
        VkDescriptorBufferInfo *buffer_ptr = buffers;
        VkDescriptorImageInfo *image_ptr = images;

        for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
                if(i->used)
                {
                        buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
                        buffer_ptr->offset = i->block->get_offset();
                        if(i->buffer->get_usage()==STREAMING)
                                buffer_ptr->offset += frame*i->buffer->get_size();
                        buffer_ptr->range = i->block->get_data_size();

                        write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                        write_ptr->dstBinding = i->binding&0xFFFFF;
                        write_ptr->descriptorCount = 1;
                        write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
                        write_ptr->pBufferInfo = buffer_ptr;

                        ++buffer_ptr;
                        ++write_ptr;
                }

        for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
                if(i->used)
                {
                        image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
                        if(i->level<0)
                                image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
                        else
                                image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->level]);
                        image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

                        write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
                        write_ptr->dstBinding = i->binding&0xFFFFF;
                        write_ptr->descriptorCount = 1;
                        write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
                        write_ptr->pImageInfo = image_ptr;

                        ++image_ptr;
                        ++write_ptr;
                }

        return n_writes;
}

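/* Records commands to put this pipeline state into effect, comparing against
the previously applied state to skip redundant rebinds.  Push constants are
re-recorded on every call in which a push constant block is bound. */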
void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
        const PipelineState &self = *static_cast<const PipelineState *>(this);
        const VulkanFunctions &vk = device.get_functions();

        if(!last)
        {
                unapplied = ~0U;
                first_changed_desc_set = 0;
        }
        else if(last!=this)
        {
                const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
                if(handle!=last->handle)
                {
                        unapplied |= PipelineState::SHPROG;
                        if(self.push_const_compat!=last_ps.push_const_compat)
                        {
                                unapplied |= PipelineState::UNIFORMS;
                                first_changed_desc_set = 0;
                        }
                }
                if(self.vertex_setup!=last_ps.vertex_setup)
                        unapplied |= PipelineState::VERTEX_SETUP;
                for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
                        if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
                        {
                                unapplied |= PipelineState::UNIFORMS;
                                first_changed_desc_set = min(first_changed_desc_set, i);
                                break;
                        }
                if(self.viewport!=last_ps.viewport)
                        unapplied |= PipelineState::VIEWPORT;
                if(self.scissor!=last_ps.scissor)
                        unapplied |= PipelineState::SCISSOR;
        }

        if(unapplied&PipelineState::SHPROG)
                vk.CmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, handle);

        if(unapplied&PipelineState::VERTEX_SETUP)
                if(const VertexSetup *vs = self.vertex_setup)
                {
                        vk.CmdBindVertexBuffers(command_buffer, 0, vs->n_bindings, vs->buffers, vs->offsets);
                        VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
                        vk.CmdBindIndexBuffer(command_buffer, vs->get_index_buffer()->handle, 0, index_type);
                }

        if(!self.uniform_blocks.empty())
        {
                const PipelineState::BoundUniformBlock &first_block = self.uniform_blocks.front();
                if(first_block.used && first_block.binding==ReflectData::PUSH_CONSTANT)
                {
                        const UniformBlock &pc_block = *first_block.block;
                        vk.CmdPushConstants(command_buffer, self.shprog->layout_handle, self.shprog->stage_flags,
                                pc_block.get_offset(), pc_block.get_data_size(), pc_block.get_data_pointer());
                }
        }

        if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_slots.empty())
        {
                vector<VkDescriptorSet> descriptor_set_handles;
                descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
                for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
                        descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
                                self.descriptor_set_slots[i], self, i, frame));

                vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
                        first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
        }

        if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
        {
                Rect fb_rect = self.framebuffer->get_rect();

                if(unapplied&PipelineState::VIEWPORT)
                {
                        Rect viewport_rect = fb_rect.intersect(self.viewport);
                        VkViewport viewport = { };
                        viewport.x = viewport_rect.left;
                        viewport.y = viewport_rect.bottom;
                        viewport.width = viewport_rect.width;
                        viewport.height = viewport_rect.height;
                        if(negative_viewport)
                        {
                                viewport.y += viewport.height;
                                viewport.height = -viewport.height;
                        }
                        viewport.minDepth = 0.0f;
                        viewport.maxDepth = 1.0f;
                        vk.CmdSetViewport(command_buffer, 0, 1, &viewport);
                }

                if(unapplied&PipelineState::SCISSOR)
                {
                        Rect scissor_rect = fb_rect.intersect(self.scissor);
                        VkRect2D scissor = { };
                        scissor.offset.x = scissor_rect.left;
                        scissor.offset.y = scissor_rect.bottom;
                        scissor.extent.width = scissor_rect.width;
                        scissor.extent.height = scissor_rect.height;
                        vk.CmdSetScissor(command_buffer, 0, 1, &scissor);
                }
        }

        unapplied = 0;
        first_changed_desc_set = descriptor_set_slots.size();
}

} // namespace GL
} // namespace Msp