+#include <msp/core/algorithm.h>
#include <msp/core/hash.h>
#include "batch.h"
#include "blend.h"
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
+ unapplied |= changes&(PipelineState::VIEWPORT|PipelineState::SCISSOR|PipelineState::VERTEX_SETUP);
+
+ if(changes&PipelineState::VERTEX_SETUP)
+ self.vertex_setup->refresh();
+
+ if(changes&PipelineState::SHPROG)
+ {
+ push_const_compat = hash<32>(self.shprog->stage_flags);
+ push_const_compat = hash_update<32>(push_const_compat, self.shprog->get_push_constants_size());
+ }
+
constexpr unsigned pipeline_mask = PipelineState::SHPROG|PipelineState::VERTEX_SETUP|PipelineState::FACE_CULL|
PipelineState::DEPTH_TEST|PipelineState::STENCIL_TEST|PipelineState::BLEND|PipelineState::PRIMITIVE_TYPE;
- if(self.changes&pipeline_mask)
+ if(changes&pipeline_mask)
+ {
handle = device.get_pipeline_cache().get_pipeline(self);
+ unapplied |= PipelineState::SHPROG;
+ }
- if(self.changes&(PipelineState::SHPROG|PipelineState::UNIFORMS|PipelineState::TEXTURES))
+ if(changes&(PipelineState::SHPROG|PipelineState::UNIFORMS|PipelineState::TEXTURES))
{
- unsigned changed_sets = (self.changes&PipelineState::SHPROG ? ~0U : 0U);
+ unsigned changed_sets = (changes&PipelineState::SHPROG ? ~0U : 0U);
for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
if(u.changed || changed_sets==~0U)
{
- u.used = self.shprog->uses_binding(u.binding);
+ if(u.block)
+ u.used = self.shprog->uses_uniform_block_binding(u.binding);
if(u.binding>=0)
changed_sets |= 1<<(u.binding>>20);
u.changed = false;
for(const PipelineState::BoundTexture &t: self.textures)
if(t.changed || changed_sets==~0U)
{
- t.used = self.shprog->uses_binding(t.binding);
+ if(t.texture && t.sampler)
+ t.used = self.shprog->uses_texture_binding(t.binding);
changed_sets |= 1<<(t.binding>>20);
+ if(t.texture && t.level>=0)
+ t.texture->refresh_mip_views();
if(t.sampler)
t.sampler->refresh();
t.changed = false;
}
- descriptor_set_handles.resize(self.shprog->get_n_descriptor_sets());
- for(unsigned i=0; i<descriptor_set_handles.size(); ++i)
- if(changed_sets&(1<<i))
- descriptor_set_handles[i] = device.get_pipeline_cache().get_descriptor_set(self, i);
+ if(changed_sets)
+ {
+ descriptor_set_slots.resize(self.shprog->get_n_descriptor_sets());
+ first_changed_desc_set = descriptor_set_slots.size();
+ for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
+ if(changed_sets&(1<<i))
+ {
+ descriptor_set_slots[i] = device.get_descriptor_pool().get_descriptor_set_slot(self, i);
+ first_changed_desc_set = min(first_changed_desc_set, i);
+ }
+
+ unapplied |= PipelineState::UNIFORMS;
+ }
}
- self.changes = 0;
+ changes = 0;
}
uint64_t VulkanPipelineState::compute_hash() const
result = hash_round<64>(result, format.get_samples());
- if(const DepthTest *depth_test = self.depth_test)
- if(depth_test->enabled)
- {
- result = hash_round<64>(result, depth_test->compare);
- result = hash_update<64>(result, depth_test->write);
- }
+ if(self.depth_test.enabled)
+ {
+ result = hash_round<64>(result, self.depth_test.compare);
+ result = hash_update<64>(result, self.depth_test.write);
+ }
- if(const StencilTest *stencil_test = self.stencil_test)
- if(stencil_test->enabled)
- {
- result = hash_round<64>(result, stencil_test->compare);
- result = hash_round<64>(result, stencil_test->stencil_fail_op);
- result = hash_round<64>(result, stencil_test->depth_fail_op);
- result = hash_round<64>(result, stencil_test->depth_pass_op);
- result = hash_update<64>(result, stencil_test->reference);
- }
+ if(self.stencil_test.enabled)
+ {
+ result = hash_round<64>(result, self.stencil_test.compare);
+ result = hash_round<64>(result, self.stencil_test.stencil_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_fail_op);
+ result = hash_round<64>(result, self.stencil_test.depth_pass_op);
+ result = hash_update<64>(result, self.stencil_test.reference);
+ }
- if(const Blend *blend = self.blend)
- if(blend->enabled)
- {
- result = hash_round<64>(result, blend->equation);
- result = hash_round<64>(result, blend->src_factor);
- result = hash_round<64>(result, blend->dst_factor);
- result = hash_round<64>(result, blend->write_mask);
- }
+ if(self.blend.enabled)
+ {
+ result = hash_round<64>(result, self.blend.equation);
+ result = hash_round<64>(result, self.blend.src_factor);
+ result = hash_round<64>(result, self.blend.dst_factor);
+ result = hash_round<64>(result, self.blend.write_mask);
+ }
for(FrameAttachment a: format)
result = hash_update<64>(result, a);
raster_info->depthClampEnable = VK_FALSE;
raster_info->rasterizerDiscardEnable = VK_FALSE;
raster_info->polygonMode = VK_POLYGON_MODE_FILL;
+ raster_info->frontFace = (self.front_face==CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE);
if(self.face_cull==NO_CULL || self.front_face==NON_MANIFOLD)
- {
raster_info->cullMode = VK_CULL_MODE_NONE;
- raster_info->frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
- }
else
- {
raster_info->cullMode = (self.face_cull==CULL_FRONT ? VK_CULL_MODE_FRONT_BIT : VK_CULL_MODE_BACK_BIT);
- raster_info->frontFace = (self.front_face==CLOCKWISE ? VK_FRONT_FACE_CLOCKWISE : VK_FRONT_FACE_COUNTER_CLOCKWISE);
- }
raster_info->depthBiasEnable = VK_FALSE;
raster_info->depthBiasConstantFactor = 0.0f;
raster_info->depthBiasClamp = 0.0f;
multisample_info->alphaToOneEnable = VK_FALSE;
depth_stencil_info->sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
- if(const DepthTest *depth_test = self.depth_test)
- {
- depth_stencil_info->depthTestEnable = depth_test->enabled;
- depth_stencil_info->depthWriteEnable = depth_test->write;
- depth_stencil_info->depthCompareOp = static_cast<VkCompareOp>(get_vulkan_predicate(depth_test->compare));
- depth_stencil_info->depthBoundsTestEnable = VK_FALSE;
- }
- if(const StencilTest *stencil_test = self.stencil_test)
+ depth_stencil_info->depthTestEnable = self.depth_test.enabled;
+ depth_stencil_info->depthWriteEnable = self.depth_test.write;
+ depth_stencil_info->depthCompareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.depth_test.compare));
+ depth_stencil_info->depthBoundsTestEnable = VK_FALSE;
+
+ depth_stencil_info->stencilTestEnable = self.stencil_test.enabled;
+ depth_stencil_info->front.failOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.stencil_fail_op));
+ depth_stencil_info->front.passOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_pass_op));
+ depth_stencil_info->front.depthFailOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(self.stencil_test.depth_fail_op));
+ depth_stencil_info->front.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(self.stencil_test.compare));
+ depth_stencil_info->front.compareMask = 0xFFFFFFFFU;
+ depth_stencil_info->front.writeMask = 0xFFFFFFFFU;
+ depth_stencil_info->front.reference = self.stencil_test.reference;
+ depth_stencil_info->back = depth_stencil_info->front;
+
+ for(unsigned i=0; i<n_color_attachments; ++i)
{
- depth_stencil_info->stencilTestEnable = stencil_test->enabled;
- depth_stencil_info->front.failOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(stencil_test->stencil_fail_op));
- depth_stencil_info->front.passOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(stencil_test->depth_pass_op));
- depth_stencil_info->front.depthFailOp = static_cast<VkStencilOp>(get_vulkan_stencil_op(stencil_test->depth_fail_op));
- depth_stencil_info->front.compareOp = static_cast<VkCompareOp>(get_vulkan_predicate(stencil_test->compare));
- depth_stencil_info->front.compareMask = 0xFFFFFFFFU;
- depth_stencil_info->front.writeMask = 0xFFFFFFFFU;
- depth_stencil_info->front.reference = stencil_test->reference;
- depth_stencil_info->back = depth_stencil_info->front;
- }
-
- if(const Blend *blend = self.blend)
- {
- for(unsigned i=0; i<n_color_attachments; ++i)
- {
- blend_attachments[i].blendEnable = blend->enabled;
- blend_attachments[i].srcColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(blend->src_factor));
- blend_attachments[i].dstColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(blend->dst_factor));
- blend_attachments[i].colorBlendOp = static_cast<VkBlendOp>(get_vulkan_blend_equation(blend->equation));
- blend_attachments[i].srcAlphaBlendFactor = blend_attachments[i].srcColorBlendFactor;
- blend_attachments[i].dstAlphaBlendFactor = blend_attachments[i].dstColorBlendFactor;
- blend_attachments[i].alphaBlendOp = blend_attachments[i].colorBlendOp;
- blend_attachments[i].colorWriteMask = get_vulkan_color_mask(blend->write_mask);
- }
- }
- else
- {
- for(unsigned i=0; i<n_color_attachments; ++i)
- blend_attachments[i].colorWriteMask = VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT;
+ blend_attachments[i].blendEnable = self.blend.enabled;
+ blend_attachments[i].srcColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.src_factor));
+ blend_attachments[i].dstColorBlendFactor = static_cast<VkBlendFactor>(get_vulkan_blend_factor(self.blend.dst_factor));
+ blend_attachments[i].colorBlendOp = static_cast<VkBlendOp>(get_vulkan_blend_equation(self.blend.equation));
+ blend_attachments[i].srcAlphaBlendFactor = blend_attachments[i].srcColorBlendFactor;
+ blend_attachments[i].dstAlphaBlendFactor = blend_attachments[i].dstColorBlendFactor;
+ blend_attachments[i].alphaBlendOp = blend_attachments[i].colorBlendOp;
+ blend_attachments[i].colorWriteMask = get_vulkan_color_mask(self.blend.write_mask);
}
blend_info->sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
}
if(self.vertex_setup)
- {
- self.vertex_setup->refresh();
pipeline_info->pVertexInputState = reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>(self.vertex_setup->creation_info.data());
- }
}
uint64_t VulkanPipelineState::compute_descriptor_set_hash(unsigned index) const
const PipelineState &self = *static_cast<const PipelineState *>(this);
uint64_t result = hash<64>(0, 0);
- for(const PipelineState::BoundUniformBlock &b: self.uniform_blocks)
- if(b.used && b.binding>=0 && static_cast<unsigned>(b.binding>>20)==index)
+ bool empty = true;
+
+ auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+ for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used)
{
- result = hash_update<64>(result, b.binding);
- result = hash_update<64>(result, reinterpret_cast<uintptr_t>(b.block));
+ result = hash_update<64>(result, i->binding);
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->block));
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(i->buffer->handle));
+ empty = false;
}
- for(const PipelineState::BoundTexture &t: self.textures)
- if(t.used && (t.binding>>20)==index)
+
+ auto j = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
+ for(; (j!=self.textures.end() && j->binding>>20==index); ++j)
+ if(j->used)
{
- result = hash_update<64>(result, t.binding);
- result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.texture));
- result = hash_update<64>(result, reinterpret_cast<uintptr_t>(t.sampler));
+ result = hash_update<64>(result, j->binding);
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->texture->handle));
+ result = hash_update<64>(result, reinterpret_cast<uintptr_t>(j->sampler->handle));
+ result = hash_update<64>(result, j->level);
+ empty = false;
}
+ if(!empty)
+ result = hash_update<64>(result, self.shprog->stage_flags);
+
return result;
}
+// Returns true if descriptor set 'index' contains at least one used uniform
+// block whose backing buffer has STREAMING usage, meaning the buffer contents
+// change per frame and the set's buffer offsets must be recomputed each frame.
+bool VulkanPipelineState::is_descriptor_set_dynamic(unsigned index) const
+{
+ const PipelineState &self = *static_cast<const PipelineState *>(this);
+
+ // Bindings pack the descriptor set index into the high bits (binding>>20).
+ // uniform_blocks is kept sorted by binding, so seek to the first entry of
+ // this set and scan the contiguous run belonging to it.
+ auto i = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+ for(; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used && i->buffer && i->buffer->get_usage()==STREAMING)
+ return true;
+
+ return false;
+}
+
+// Returns the VkDescriptorSetLayout for descriptor set 'index', taken from the
+// layout handles owned by the bound shader program (no bounds check here;
+// callers are expected to stay within shprog's declared set count).
VkDescriptorSetLayout VulkanPipelineState::get_descriptor_set_layout(unsigned index) const
{
return static_cast<const PipelineState *>(this)->shprog->desc_set_layout_handles[index];
}
-unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, vector<char> &buffer) const
+unsigned VulkanPipelineState::fill_descriptor_writes(unsigned index, unsigned frame, vector<char> &buffer) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
+ auto u_begin = lower_bound_member(self.uniform_blocks, static_cast<int>(index)<<20, &PipelineState::BoundUniformBlock::binding);
+ auto t_begin = lower_bound_member(self.textures, index<<20, &PipelineState::BoundTexture::binding);
+
unsigned n_buffers = 0;
- for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
- if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
+ for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used)
++n_buffers;
unsigned n_images = 0;
- for(const PipelineState::BoundTexture &t: self.textures)
- if(t.used && (t.binding>>20)==index)
+ for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
+ if(i->used)
++n_images;
unsigned n_writes = n_buffers+n_images;
VkDescriptorBufferInfo *buffer_ptr = buffers;
VkDescriptorImageInfo *image_ptr = images;
- for(const PipelineState::BoundUniformBlock &u: self.uniform_blocks)
- if(u.used && u.binding>=0 && static_cast<unsigned>(u.binding>>20)==index)
+ for(auto i=u_begin; (i!=self.uniform_blocks.end() && static_cast<unsigned>(i->binding)>>20==index); ++i)
+ if(i->used)
{
- buffer_ptr->buffer = handle_cast<::VkBuffer>(u.block->get_buffer()->handle);
- buffer_ptr->offset = u.block->get_offset();
- buffer_ptr->range = u.block->get_data_size();
+ buffer_ptr->buffer = handle_cast<::VkBuffer>(i->buffer->handle);
+ buffer_ptr->offset = i->block->get_offset();
+ if(i->buffer->get_usage()==STREAMING)
+ buffer_ptr->offset += frame*i->buffer->get_size();
+ buffer_ptr->range = i->block->get_data_size();
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- write_ptr->dstBinding = u.binding&0xFFFFF;
+ write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
write_ptr->pBufferInfo = buffer_ptr;
++write_ptr;
}
- for(const PipelineState::BoundTexture &t: self.textures)
- if(t.used && (t.binding>>20)==index)
+ for(auto i=t_begin; (i!=self.textures.end() && i->binding>>20==index); ++i)
+ if(i->used)
{
- image_ptr->sampler = handle_cast<::VkSampler>(t.sampler->handle);
- image_ptr->imageView = handle_cast<::VkImageView>(t.texture->view_handle);
+ image_ptr->sampler = handle_cast<::VkSampler>(i->sampler->handle);
+ if(i->level<0)
+ image_ptr->imageView = handle_cast<::VkImageView>(i->texture->view_handle);
+ else
+ image_ptr->imageView = handle_cast<::VkImageView>(i->texture->mip_view_handles[i->level]);
image_ptr->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
write_ptr->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
- write_ptr->dstBinding = t.binding&0xFFFFF;
+ write_ptr->dstBinding = i->binding&0xFFFFF;
write_ptr->descriptorCount = 1;
write_ptr->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
write_ptr->pImageInfo = image_ptr;
return n_writes;
}
-void VulkanPipelineState::apply(VkCommandBuffer command_buffer) const
+void VulkanPipelineState::apply(VkCommandBuffer command_buffer, const VulkanPipelineState *last, unsigned frame, bool negative_viewport) const
{
const PipelineState &self = *static_cast<const PipelineState *>(this);
const VulkanFunctions &vk = device.get_functions();
- vk.CmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, handle);
- if(const VertexSetup *vs = self.vertex_setup)
+ if(!last)
{
- vk.CmdBindVertexBuffers(command_buffer, 0, vs->n_bindings, vs->buffers, vs->offsets);
- VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
- vk.CmdBindIndexBuffer(command_buffer, vs->get_index_buffer()->handle, 0, index_type);
+ unapplied = ~0U;
+ first_changed_desc_set = 0;
}
+ else if(last!=this)
+ {
+ const PipelineState &last_ps = *static_cast<const PipelineState *>(last);
+ if(handle!=last->handle)
+ {
+ unapplied |= PipelineState::SHPROG;
+ if(self.push_const_compat!=last_ps.push_const_compat)
+ {
+ unapplied |= PipelineState::UNIFORMS;
+ first_changed_desc_set = 0;
+ }
+ }
+ if(self.vertex_setup!=last_ps.vertex_setup)
+ unapplied |= PipelineState::VERTEX_SETUP;
+ for(unsigned i=0; i<descriptor_set_slots.size(); ++i)
+ if(i>=last->descriptor_set_slots.size() || descriptor_set_slots[i]!=last->descriptor_set_slots[i])
+ {
+ unapplied |= PipelineState::UNIFORMS;
+ first_changed_desc_set = min(first_changed_desc_set, i);
+ break;
+ }
+ if(self.viewport!=last_ps.viewport)
+ unapplied |= PipelineState::VIEWPORT;
+ if(self.scissor!=last_ps.scissor)
+ unapplied |= PipelineState::SCISSOR;
+ }
+
+ if(unapplied&PipelineState::SHPROG)
+ vk.CmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, handle);
+
+ if(unapplied&PipelineState::VERTEX_SETUP)
+ if(const VertexSetup *vs = self.vertex_setup)
+ {
+ vk.CmdBindVertexBuffers(command_buffer, 0, vs->n_bindings, vs->buffers, vs->offsets);
+ VkIndexType index_type = static_cast<VkIndexType>(get_vulkan_index_type(vs->get_index_type()));
+ vk.CmdBindIndexBuffer(command_buffer, vs->get_index_buffer()->handle, 0, index_type);
+ }
if(!self.uniform_blocks.empty())
{
if(first_block.used && first_block.binding==ReflectData::PUSH_CONSTANT)
{
const UniformBlock &pc_block = *first_block.block;
- vk.CmdPushConstants(command_buffer, self.shprog->layout_handle, VK_SHADER_STAGE_ALL,
+ vk.CmdPushConstants(command_buffer, self.shprog->layout_handle, self.shprog->stage_flags,
pc_block.get_offset(), pc_block.get_data_size(), pc_block.get_data_pointer());
}
}
- vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle, 0, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
-
- VkViewport viewport = { };
- if(self.viewport)
- {
- viewport.x = self.viewport->left;
- viewport.y = self.framebuffer->get_height()-(self.viewport->bottom+self.viewport->height);
- viewport.width = self.viewport->width;
- viewport.height = self.viewport->height;
- }
- else
+ if((unapplied&PipelineState::UNIFORMS) && !descriptor_set_slots.empty())
{
- viewport.x = 0;
- viewport.y = 0;
- viewport.width = self.framebuffer->get_width();
- viewport.height = self.framebuffer->get_height();
+ vector<VkDescriptorSet> descriptor_set_handles;
+ descriptor_set_handles.reserve(descriptor_set_slots.size()-first_changed_desc_set);
+ for(unsigned i=first_changed_desc_set; i<descriptor_set_slots.size(); ++i)
+ descriptor_set_handles.push_back(device.get_descriptor_pool().get_descriptor_set(
+ self.descriptor_set_slots[i], self, i, frame));
+
+ vk.CmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, self.shprog->layout_handle,
+ first_changed_desc_set, descriptor_set_handles.size(), descriptor_set_handles.data(), 0, 0);
}
- viewport.minDepth = 0.0f;
- viewport.maxDepth = 1.0f;
- vk.CmdSetViewport(command_buffer, 0, 1, &viewport);
- VkRect2D scissor = { };
- if(self.scissor)
- {
- scissor.offset.x = self.scissor->left;
- scissor.offset.y = self.framebuffer->get_height()-(self.scissor->bottom+self.scissor->height);
- scissor.extent.width = self.scissor->width;
- scissor.extent.height = self.scissor->height;
- }
- else
+ if(unapplied&(PipelineState::VIEWPORT|PipelineState::SCISSOR))
{
- scissor.offset.x = 0;
- scissor.offset.y = 0;
- scissor.extent.width = self.framebuffer->get_width();
- scissor.extent.height = self.framebuffer->get_height();
+ Rect fb_rect = self.framebuffer->get_rect();
+
+ if(unapplied&PipelineState::VIEWPORT)
+ {
+ Rect viewport_rect = fb_rect.intersect(self.viewport);
+ VkViewport viewport = { };
+ viewport.x = viewport_rect.left;
+ viewport.y = viewport_rect.bottom;
+ viewport.width = viewport_rect.width;
+ viewport.height = viewport_rect.height;
+ if(negative_viewport)
+ {
+ viewport.y += viewport.height;
+ viewport.height = -viewport.height;
+ }
+ viewport.minDepth = 0.0f;
+ viewport.maxDepth = 1.0f;
+ vk.CmdSetViewport(command_buffer, 0, 1, &viewport);
+ }
+
+ if(unapplied&PipelineState::SCISSOR)
+ {
+ Rect scissor_rect = fb_rect.intersect(self.scissor);
+ VkRect2D scissor = { };
+ scissor.offset.x = scissor_rect.left;
+ scissor.offset.y = scissor_rect.bottom;
+ scissor.extent.width = scissor_rect.width;
+ scissor.extent.height = scissor_rect.height;
+ vk.CmdSetScissor(command_buffer, 0, 1, &scissor);
+ }
}
- vk.CmdSetScissor(command_buffer, 0, 1, &scissor);
+
+ unapplied = 0;
+ first_changed_desc_set = descriptor_set_slots.size();
}
} // namespace GL