// libs/gl.git — source/backends/vulkan/pipelinestate_backend.cpp
// (snapshot of commit b7e50873aec14f1cb16a841916adb5a6ccfb97cd)
1 #include <msp/core/algorithm.h>
2 #include <msp/core/hash.h>
3 #include "batch.h"
4 #include "blend.h"
5 #include "buffer.h"
6 #include "depthtest.h"
7 #include "device.h"
8 #include "framebuffer.h"
9 #include "pipelinestate.h"
10 #include "pipelinestate_backend.h"
11 #include "program.h"
12 #include "rect.h"
13 #include "renderpass.h"
14 #include "sampler.h"
15 #include "stenciltest.h"
16 #include "structurebuilder.h"
17 #include "texture.h"
18 #include "uniformblock.h"
19 #include "vertexsetup.h"
20 #include "vulkan.h"
21
22 using namespace std;
23
24 namespace Msp {
25 namespace GL {
26
// Captures the device current at construction time; all Vulkan objects
// derived from this state (pipelines, descriptor set slots) are obtained
// from that device's caches.
VulkanPipelineState::VulkanPipelineState():
	device(Device::get_current())
{ }
30
// Move constructor: takes over the device reference and the cached pipeline
// handle (non-owning; the pipeline object itself is managed by the device's
// pipeline cache).  NOTE(review): the remaining cached members (unapplied
// flags, descriptor set slots, ...) are not transferred and stay
// default-initialized, so the moved-to object will recompute them on the
// next update()/apply() — presumably intentional; confirm against the header.
VulkanPipelineState::VulkanPipelineState(VulkanPipelineState &&other):
	device(other.device),
	handle(other.handle)
{ }
35
/* Processes the state changes accumulated in `changes` and refreshes the
cached Vulkan objects.  Flags for state that must be re-recorded on the
command buffer are collected into `unapplied` for apply() to consume. */
void VulkanPipelineState::update() const
{
	// This backend object is only ever embedded in a PipelineState.
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	// Viewport, scissor and vertex buffers are bound dynamically rather than
	// baked into the pipeline object, so they only need re-binding in apply().
	unapplied |= changes&(PipelineState::VIEWPORT|PipelineState::SCISSOR|PipelineState::VERTEX_SETUP);

	if(changes&PipelineState::VERTEX_SETUP)
		self.vertex_setup->refresh();

	if(changes&PipelineState::SHPROG)
	{
		// Hash of the properties which determine push constant compatibility
		// between shader programs (stage flags and push constant data size).
		// apply() compares these hashes to decide whether descriptor sets
		// survive a pipeline switch.
		push_const_compat = hash<32>(self.shprog->stage_flags);
		push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
	}

	// These are the states baked into the VkPipeline object itself; any of
	// them changing requires looking up (or creating) a different pipeline.
	constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
		PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
	if(changes&pipeline_mask)
	{
		handle = device.get_pipeline_cache().get_pipeline(self);
		// SHPROG doubles as the "pipeline needs re-binding" flag in apply().
		unapplied |= PipelineState::SHPROG;
	}

	if(changes&(PipelineState::SHPROG|PipelineState::UNIFORMS|PipelineState::TEXTURES))
	{
		// Bit i of changed_sets means descriptor set i needs a new slot.  A
		// new shader program sets all bits (~0U), which also forces every
		// bound resource to re-evaluate whether the program uses it.
		unsigned changed_sets = (changes&PipelineState::SHPROG ? ~0U : 0U);
		for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
			if(u.changed || changed_sets==~0U)
			{
				if(u.block)
					u.used = self.shprog->uses_uniform_block_binding(u.binding);
				// Negative bindings (push constants) belong to no descriptor
				// set; the set index lives in the high bits of the binding.
				if(u.binding>=0)
					changed_sets |= 1<<(u.binding>>20);
				u.changed = false;
			}
		for(const PipelineState::BoundTexture &t: self.textures)
			if(t.changed || changed_sets==~0U)
			{
				if(t.texture && t.sampler)
					t.used = self.shprog->uses_texture_binding(t.binding);
				changed_sets |= 1<<(t.binding>>20);
				// A non-negative level means a single mip level is bound,
				// which requires per-level image views to exist.
				if(t.texture && t.level>=0)
					t.texture->refresh_mip_views();
				if(t.sampler)
					t.sampler->refresh();
				t.changed = false;
			}

		if(changed_sets)
		{
			descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
			// Start past the end; shrinks to the lowest changed set below.
			first_changed_desc_set = descriptor_set_slots.size();
			for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
				if(changed_sets&(1<<i))
				{
					descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
					first_changed_desc_set = min(first_changed_desc_set, i);
				}

			unapplied |= PipelineState::UNIFORMS;
		}
	}

	changes = 0;
}
101
/* Computes a hash over all state that is baked into the VkPipeline object,
for use as the lookup key in the device's pipeline cache.  The set of states
covered must stay in sync with what fill_creation_info() writes. */
uint64_t VulkanPipelineState::compute_hash() const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);
	const FrameFormat &format = self.framebuffer->get_format();

	uint64_t result = hash<64>(self.shprog);
	result = hash_update<64>(result, self.vertex_setup->compute_hash());
	result = hash_round<64>(result, self.primitive_type);

	// Winding and culling are only hashed when culling actually takes
	// effect.  This mirrors fill_creation_info(), which sets cullMode to
	// NONE in the other cases, so equivalent pipelines share a cache entry.
	if(self.front_face!=NON_MANIFOLD && self.face_cull!=NO_CULL)
	{
		result = hash_round<64>(result, self.front_face);
		result = hash_round<64>(result, self.face_cull);
	}

	result = hash_round<64>(result, format.get_samples());

	// Disabled tests contribute nothing, so states that differ only in the
	// parameters of a disabled test hash identically.
	if(self.depth_test.enabled)
	{
		result = hash_round<64>(result, self.depth_test.compare);
		result = hash_update<64>(result, self.depth_test.write);
	}

	if(self.stencil_test.enabled)
	{
		result = hash_round<64>(result, self.stencil_test.compare);
		result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
		result = hash_round<64>(result, self.stencil_test.depth_fail_op);
		result = hash_round<64>(result, self.stencil_test.depth_pass_op);
		result = hash_update<64>(result, self.stencil_test.reference);
	}

	if(self.blend.enabled)
	{
		result = hash_round<64>(result, self.blend.equation);
		result = hash_round<64>(result, self.blend.src_factor);
		result = hash_round<64>(result, self.blend.dst_factor);
		result = hash_round<64>(result, self.blend.write_mask);
	}

	// Attachment formats determine render pass compatibility and the number
	// of color blend attachment states.
	for(FrameAttachment a: format)
		result = hash_update<64>(result, a);

	return result;
}
147
/* Fills `buffer` with a VkGraphicsPipelineCreateInfo and all of its
sub-structures describing the current state.  The structures are packed into
the byte buffer by StructureBuilder and point into each other, so the buffer
can be handed to vkCreateGraphicsPipelines as-is. */
void VulkanPipelineState::fill_creation_info(vector<char> &buffer) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	// A temporary RenderPass object is used to obtain a compatible
	// VkRenderPass handle for the pipeline.
	const FrameFormat &format = self.framebuffer->get_format();
	RenderPass render_pass;
	render_pass.framebuffer = self.framebuffer;
	render_pass.update(device);

	// Count color attachments; each one needs its own blend state entry.
	unsigned n_color_attachments = 0;
	for(FrameAttachment a: format)
	{
		unsigned attach_pt = get_attach_point(a);
		if(attach_pt!=get_attach_point(DEPTH_ATTACHMENT) && attach_pt!=get_attach_point(STENCIL_ATTACHMENT))
			++n_color_attachments;
	}

	// NOTE(review): these are references to pointers, which suggests
	// StructureBuilder keeps them valid across reallocation as more
	// structures are added — confirm in structurebuilder.h.
	StructureBuilder sb(buffer, 10);
	VkGraphicsPipelineCreateInfo *const &pipeline_info = sb.add<VkGraphicsPipelineCreateInfo>();
	VkPipelineInputAssemblyStateCreateInfo *const &input_assembly_info = sb.add<VkPipelineInputAssemblyStateCreateInfo>();
	VkPipelineViewportStateCreateInfo *const &viewport_info = sb.add<VkPipelineViewportStateCreateInfo>();
	VkPipelineRasterizationStateCreateInfo *const &raster_info = sb.add<VkPipelineRasterizationStateCreateInfo>();
	VkPipelineMultisampleStateCreateInfo *const &multisample_info = sb.add<VkPipelineMultisampleStateCreateInfo>();
	VkPipelineDepthStencilStateCreateInfo *const &depth_stencil_info = sb.add<VkPipelineDepthStencilStateCreateInfo>();
	VkPipelineColorBlendStateCreateInfo *const &blend_info = sb.add<VkPipelineColorBlendStateCreateInfo>();
	VkPipelineColorBlendAttachmentState *const &blend_attachments = sb.add<VkPipelineColorBlendAttachmentState>(n_color_attachments);
	VkPipelineDynamicStateCreateInfo *const &dynamic_info = sb.add<VkPipelineDynamicStateCreateInfo>();
	VkDynamicState *const &dynamic_states = sb.add<VkDynamicState>(2);

	input_assembly_info->sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	input_assembly_info->topology = static_cast<VkPrimitiveTopology>(get_vulkan_primitive_type(self.primitive_type));
	// NOTE(review): the Vulkan spec requires primitiveRestartEnable to be
	// VK_FALSE for list topologies; enabling it unconditionally may trip
	// validation layers — confirm against get_vulkan_primitive_type().
	input_assembly_info->primitiveRestartEnable = true;

	// Viewport and scissor are dynamic state (see dynamic_states below),
	// so only the counts matter here and the pointers stay null.
	viewport_info->sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewport_info->viewportCount = 1;
	viewport_info->pViewports = 0;
	viewport_info->scissorCount = 1;
	viewport_info->pScissors = 0;

	raster_info->sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	raster_info->depthClampEnable = VK_FALSE;
	raster_info->rasterizerDiscardEnable = VK_FALSE;
	raster_info->polygonMode = VK_POLYGON_MODE_FILL;
	// The winding is deliberately inverted here — presumably to compensate
	// for the flipped Y axis (see negative_viewport in apply()); confirm.
	raster_info->frontFace = (self.front_face==CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE);
	if(self.face_cull==NO_CULL || self.front_face==NON_MANIFOLD)
		raster_info->cullMode = VK_CULL_MODE_NONE;
	else
		raster_info->cullMode = (self.face_cull==CULL_FRONT ? VK_CULL_MODE_FRONT_BIT : VK_CULL_MODE_BACK_BIT);
	raster_info->depthBiasEnable = VK_FALSE;
	raster_info->depthBiasConstantFactor = 0.0f;
	raster_info->depthBiasClamp = 0.0f;
	raster_info->depthBiasSlopeFactor = 0.0f;
	raster_info->lineWidth = 1.0f;

	multisample_info->sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisample_info->rasterizationSamples = static_cast<VkSampleCountFlagBits>(get_vulkan_samples(format.get_samples()));
	multisample_info->sampleShadingEnable = VK_FALSE;
	multisample_info->minSampleShading = 1.0f;
	multisample_info->pSampleMask = 0;
	multisample_info->alphaToCoverageEnable = VK_FALSE;
	multisample_info->alphaToOneEnable = VK_FALSE;

	depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depth_stencil_info->depthTestEnable = self.depth_test.enabled;
	depth_stencil_info->depthWriteEnable = self.depth_test.write;
	depth_stencil_info->depthCompareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.depth_test.compare));
	depth_stencil_info->depthBoundsTestEnable = VK_FALSE;

	// The same stencil parameters are applied to both faces (back is copied
	// from front below); compare and write masks are fully open.
	depth_stencil_info->stencilTestEnable = self.stencil_test.enabled;
	depth_stencil_info->front.failOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.stencil_fail_op));
	depth_stencil_info->front.passOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_pass_op));
	depth_stencil_info->front.depthFailOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_fail_op));
	depth_stencil_info->front.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.stencil_test.compare));
	depth_stencil_info->front.compareMask = 0xFFFFFFFFU;
	depth_stencil_info->front.writeMask = 0xFFFFFFFFU;
	depth_stencil_info->front.reference = self.stencil_test.reference;
	depth_stencil_info->back = depth_stencil_info->front;

	// Every color attachment gets identical blend state; the alpha channel
	// uses the same factors and equation as the color channels.
	for(unsigned i=0; i<n_color_attachments; ++i)
	{
		blend_attachments[i].blendEnable = self.blend.enabled;
		blend_attachments[i].srcColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.src_factor));
		blend_attachments[i].dstColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.dst_factor));
		blend_attachments[i].colorBlendOp = static_cast<VkBlendOp>(get_vulkan_blend_equation(self.blend.equation));
		blend_attachments[i].srcAlphaBlendFactor = blend_attachments[i].srcColorBlendFactor;
		blend_attachments[i].dstAlphaBlendFactor = blend_attachments[i].dstColorBlendFactor;
		blend_attachments[i].alphaBlendOp = blend_attachments[i].colorBlendOp;
		blend_attachments[i].colorWriteMask = get_vulkan_color_mask(self.blend.write_mask);
	}

	blend_info->sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	blend_info->attachmentCount = n_color_attachments;
	blend_info->pAttachments = blend_attachments;

	dynamic_states[0] = VK_DYNAMIC_STATE_VIEWPORT;
	dynamic_states[1] = VK_DYNAMIC_STATE_SCISSOR;

	dynamic_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamic_info->dynamicStateCount = 2;
	dynamic_info->pDynamicStates = dynamic_states;

	pipeline_info->sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;

	pipeline_info->pInputAssemblyState = input_assembly_info;
	pipeline_info->pTessellationState = 0;
	pipeline_info->pViewportState = viewport_info;
	pipeline_info->pRasterizationState = raster_info;
	pipeline_info->pMultisampleState = multisample_info;
	pipeline_info->pDepthStencilState = depth_stencil_info;
	pipeline_info->pColorBlendState = blend_info;
	pipeline_info->pDynamicState = dynamic_info;
	pipeline_info->renderPass = handle_cast<::VkRenderPass>(render_pass.handle);
	pipeline_info->subpass = 0;

	// Shader stages and the pipeline layout come pre-built from the program.
	if(self.shprog)
	{
		pipeline_info->stageCount = self.shprog->n_stages;
		pipeline_info->pStages = reinterpret_cast<const VkPipelineShaderStageCreateInfo *>(self.shprog->creation_info.data());
		pipeline_info->layout = handle_cast<::VkPipelineLayout>(self.shprog->layout_handle);
	}

	// Vertex input state likewise comes pre-built from the vertex setup.
	if(self.vertex_setup)
		pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
}
272
/* Computes a hash of the resources bound to descriptor set `index`, used by
the descriptor pool to find an existing matching descriptor set.  Bindings
encode the set index in their high bits ((binding>>20)==set), and the bound
resource lists are sorted by binding, so each set is a contiguous range
located with lower_bound. */
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	uint64_t result = hash<64>(0, 0);
	bool empty = true;

	// Uniform blocks: the binding cast to int keeps negative (push constant)
	// bindings ordered before set 0.
	auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
	for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
		if(i->used)
		{
			result = hash_update<64>(result, i->binding);
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
			empty = false;
		}

	auto j = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
	for(; (j!=self.textures.end() && j->binding>>20==index); ++j)
		if(j->used)
		{
			result = hash_update<64>(result, j->binding);
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->texture->handle));
			result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->sampler->handle));
			result = hash_update<64>(result, j->level);
			empty = false;
		}

	// Mix in the stage flags so the same resources used with a different
	// stage combination produce a distinct hash; skipped when the set is
	// empty so all empty sets hash alike.
	if(!empty)
		result = hash_update<64>(result, self.shprog->stage_flags);

	return result;
}
306
307 bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
308 {
309         const PipelineState &self = *static_cast<const PipelineState *>(this);
310
311         auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
312         for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
313                 if(i->used && i->buffer && i->buffer->get_usage()==STREAMING)
314                         return true;
315
316         return false;
317 }
318
319 VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
320 {
321         return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
322 }
323
/* Fills `buffer` with VkWriteDescriptorSet structures — plus the buffer and
image info structures they point to — for every used resource in descriptor
set `index`.  `frame` selects the per-frame region of streaming uniform
buffers.  Returns the number of write structures produced. */
unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	// Resources are sorted by binding with the set index in the high bits,
	// so each set is a contiguous range starting at lower_bound.
	auto u_begin = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
	auto t_begin = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);

	// First pass: count the entries so exact-size arrays can be allocated.
	unsigned n_buffers = 0;
	for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
		if(i->used)
			++n_buffers;
	unsigned n_images = 0;
	for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
		if(i->used)
			++n_images;
	unsigned n_writes = n_buffers+n_images;

	StructureBuilder sb(buffer, 3);
	VkWriteDescriptorSet *const &writes = sb.add<VkWriteDescriptorSet>(n_writes);
	VkDescriptorBufferInfo *const &buffers = sb.add<VkDescriptorBufferInfo>(n_buffers);
	VkDescriptorImageInfo *const &images = sb.add<VkDescriptorImageInfo>(n_images);

	// Second pass: fill the arrays through bump pointers.
	VkWriteDescriptorSet *write_ptr = writes;
	VkDescriptorBufferInfo *buffer_ptr = buffers;
	VkDescriptorImageInfo *image_ptr = images;

	for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
		if(i->used)
		{
			buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
			buffer_ptr->offset = i->block->get_offset();
			// Streaming buffers contain one copy of the data per in-flight
			// frame; offset into the copy for this frame.
			if(i->buffer->get_usage()==STREAMING)
				buffer_ptr->offset += frame*i->buffer->get_size();
			buffer_ptr->range = i->block->get_data_size();

			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			// The low 20 bits hold the binding number within the set.
			write_ptr->dstBinding = i->binding&0xFFFFF;
			write_ptr->descriptorCount = 1;
			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
			write_ptr->pBufferInfo = buffer_ptr;

			++buffer_ptr;
			++write_ptr;
		}

	for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
		if(i->used)
		{
			image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
			// A non-negative level selects a view of a single mip level.
			if(i->level<0)
				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
			else
				image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->level]);
			image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

			write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
			write_ptr->dstBinding = i->binding&0xFFFFF;
			write_ptr->descriptorCount = 1;
			write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
			write_ptr->pImageInfo = image_ptr;

			++image_ptr;
			++write_ptr;
		}

	return n_writes;
}
391
/* Records the commands needed to make this state current on the command
buffer.  `last` is the previously applied state (null at the start of
recording, forcing everything to be applied), `frame` selects the per-frame
region of streaming buffers, and `negative_viewport` flips the viewport's
Y axis (negative-height viewport). */
void VulkanPipelineState::apply(const VulkanCommandRecorder &vkCmd, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
	const PipelineState &self = *static_cast<const PipelineState *>(this);

	if(!last)
	{
		// No known previous state; re-bind everything.
		unapplied = ~0U;
		first_changed_desc_set = 0;
	}
	else if(last!=this)
	{
		// Diff against the previously applied state to find the minimal set
		// of bindings that must be re-recorded.
		const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
		if(handle!=last->handle)
		{
			unapplied |= PipelineState::SHPROG;
			// Binding a pipeline whose layout is not push-constant
			// compatible disturbs existing descriptor set bindings, so
			// re-bind all sets in that case (see push_const_compat in
			// update()).
			if(self.push_const_compat!=last_ps.push_const_compat)
			{
				unapplied |= PipelineState::UNIFORMS;
				first_changed_desc_set = 0;
			}
		}
		if(self.vertex_setup!=last_ps.vertex_setup)
			unapplied |= PipelineState::VERTEX_SETUP;
		// All sets from the first differing slot onwards are re-bound.
		for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
			if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
			{
				unapplied |= PipelineState::UNIFORMS;
				first_changed_desc_set = min(first_changed_desc_set, i);
				break;
			}
		if(self.viewport!=last_ps.viewport)
			unapplied |= PipelineState::VIEWPORT;
		if(self.scissor!=last_ps.scissor)
			unapplied |= PipelineState::SCISSOR;
	}

	if(unapplied&PipelineState::SHPROG)
		vkCmd.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, handle);

	if(unapplied&PipelineState::VERTEX_SETUP)
		if(const VertexSetup *vs = self.vertex_setup)
		{
			vkCmd.BindVertexBuffers(0, vs->n_bindings, vs->buffers, vs->offsets);
			VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
			vkCmd.BindIndexBuffer(vs->get_index_buffer()->handle, 0, index_type);
		}

	// Push constants are recorded on every apply.  The push constant block,
	// if present, sorts first in uniform_blocks because of its binding value
	// (see the negative-binding handling in update()).
	if(!self.uniform_blocks.empty())
	{
		const PipelineState::BoundUniformBlock &first_block = self.uniform_blocks.front();
		if(first_block.used && first_block.binding==ReflectData::PUSH_CONSTANT)
		{
			const UniformBlock &pc_block = *first_block.block;
			vkCmd.PushConstants(self.shprog->layout_handle, self.shprog->stage_flags,
				pc_block.get_offset(), pc_block.get_data_size(), pc_block.get_data_pointer());
		}
	}

	if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_slots.empty())
	{
		// Resolve slots to actual descriptor set handles (per frame, so
		// streaming buffers get the right offsets) and bind everything from
		// the first changed set onwards in a single call.
		vector<VkDescriptorSet> descriptor_set_handles;
		descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
		for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
			descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
				self.descriptor_set_slots[i], self, i, frame));

		vkCmd.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
			first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
	}

	if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
	{
		// Both rectangles are clipped to the framebuffer bounds.
		Rect fb_rect = self.framebuffer->get_rect();

		if(unapplied&PipelineState::VIEWPORT)
		{
			Rect viewport_rect = fb_rect.intersect(self.viewport);
			VkViewport viewport = { };
			viewport.x = viewport_rect.left;
			viewport.y = viewport_rect.bottom;
			viewport.width = viewport_rect.width;
			viewport.height = viewport_rect.height;
			// Negative-height viewport flips the Y axis; the origin moves
			// to the other edge to keep the same screen region.
			if(negative_viewport)
			{
				viewport.y += viewport.height;
				viewport.height = -viewport.height;
			}
			viewport.minDepth = 0.0f;
			viewport.maxDepth = 1.0f;
			vkCmd.SetViewport(0, 1, &viewport);
		}

		if(unapplied&PipelineState::SCISSOR)
		{
			Rect scissor_rect = fb_rect.intersect(self.scissor);
			VkRect2D scissor = { };
			scissor.offset.x = scissor_rect.left;
			scissor.offset.y = scissor_rect.bottom;
			scissor.extent.width = scissor_rect.width;
			scissor.extent.height = scissor_rect.height;
			vkCmd.SetScissor(0, 1, &scissor);
		}
	}

	// Everything is now in sync with the command buffer.
	unapplied = 0;
	first_changed_desc_set = descriptor_set_slots.size();
}
499
500 } // namespace GL
501 } // namespace Msp