
Add shader constant support for Vulkan

Joshua Goins 2022-02-07 15:15:09 -05:00
parent 4e0f92014a
commit ad29cf3174
2 changed files with 63 additions and 9 deletions


@@ -144,13 +144,13 @@ struct GFXShaderBinding {
 struct GFXShaderConstant {
     int index = 0;
 
     enum class Type {
         Integer
-    } type;
+    } type = Type::Integer;
 
     union {
-        int value;
+        int value = 0;
     };
 };
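For reference, this is roughly how a caller feeds one of these constants into a pipeline. A sketch only: the max_lights variable, the gfx backend pointer, and the full create-info type name GFXGraphicsPipelineCreateInfo are assumptions (the hunk headers below truncate the type name); the index/type/value members and the shaders.vertex_constants field are the ones this commit touches, and GFXShaderConstants is assumed to behave like a std::vector of GFXShaderConstant, as its use in the lambda below suggests.

// Hypothetical usage sketch; only the GFXShaderConstant fields and the
// shaders.vertex_constants member are taken from this commit.
GFXGraphicsPipelineCreateInfo info = {};

GFXShaderConstant max_lights = {};
max_lights.index = 0;                               // becomes constant_id 0 in the shader
max_lights.type = GFXShaderConstant::Type::Integer; // only integer constants are supported for now
max_lights.value = 8;

info.shaders.vertex_constants = {max_lights};

GFXPipeline* pipeline = gfx->create_graphics_pipeline(info); // gfx: some GFX backend instance, e.g. a GFXVulkan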


@@ -774,7 +774,29 @@ GFXPipeline* GFXVulkan::create_graphics_pipeline(const GFXGraphicsPipelineCreate
     std::vector<VkPipelineShaderStageCreateInfo> shaderStages;
-    if (has_vertex_stage) {
+
+    VkSpecializationInfo vertex_specialization_info;
+    std::vector<VkSpecializationMapEntry> vertex_map_entries;
+
+    VkSpecializationInfo fragment_specialization_info;
+    std::vector<VkSpecializationMapEntry> fragment_map_entries;
+
+    const auto fill_map_entries = [](const GFXShaderConstants& constants, std::vector<VkSpecializationMapEntry>& entries) {
+        for(int i = 0; i < constants.size(); i++) {
+            // TODO: we only support int specializations (which is okay right now)
+            VkSpecializationMapEntry entry = {};
+            entry.constantID = constants[i].index;
+            entry.size = sizeof(int);
+            entry.offset = sizeof(int) * i;
+
+            entries.push_back(entry);
+        }
+    };
+
+    std::vector<int> vertex_data;
+    std::vector<int> fragment_data;
+
+    if (has_vertex_stage) {
         const bool vertex_use_shader_source = !info.shaders.vertex_src.is_path();
 
         if (vertex_use_shader_source) {
@@ -797,9 +819,24 @@ GFXPipeline* GFXVulkan::create_graphics_pipeline(const GFXGraphicsPipelineCreate
         vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
         vertShaderStageInfo.module = vertex_module;
         vertShaderStageInfo.pName = "main";
 
+        if(!info.shaders.vertex_constants.empty()) {
+            fill_map_entries(info.shaders.vertex_constants, vertex_map_entries);
+
+            vertex_specialization_info.mapEntryCount = vertex_map_entries.size();
+            vertex_specialization_info.pMapEntries = vertex_map_entries.data();
+
+            for(auto constant : info.shaders.vertex_constants) {
+                vertex_data.push_back(constant.value);
+            }
+
+            vertex_specialization_info.dataSize = vertex_data.size() * sizeof(int);
+            vertex_specialization_info.pData = vertex_data.data();
+
+            vertShaderStageInfo.pSpecializationInfo = &vertex_specialization_info;
+        }
+
         shaderStages.push_back(vertShaderStageInfo);
     }
 
     if (has_fragment_stage) {
         const bool fragment_use_shader_source = !info.shaders.fragment_src.is_path();
@@ -824,6 +861,21 @@ GFXPipeline* GFXVulkan::create_graphics_pipeline(const GFXGraphicsPipelineCreate
         fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
         fragShaderStageInfo.module = fragment_module;
         fragShaderStageInfo.pName = "main";
 
+        if(!info.shaders.fragment_constants.empty()) {
+            fill_map_entries(info.shaders.fragment_constants, fragment_map_entries);
+
+            fragment_specialization_info.mapEntryCount = fragment_map_entries.size();
+            fragment_specialization_info.pMapEntries = fragment_map_entries.data();
+
+            for(auto constant : info.shaders.fragment_constants) {
+                fragment_data.push_back(constant.value);
+            }
+
+            fragment_specialization_info.dataSize = fragment_data.size() * sizeof(int);
+            fragment_specialization_info.pData = fragment_data.data();
+
+            fragShaderStageInfo.pSpecializationInfo = &fragment_specialization_info;
+        }
+
         shaderStages.push_back(fragShaderStageInfo);
     }
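For anyone less familiar with the Vulkan side, the two blocks above are filling in the standard specialization-constant structures. Below is a minimal hand-rolled sketch of the same wiring for a single int constant: VkSpecializationMapEntry, VkSpecializationInfo, and their fields are the real Vulkan API, while the helper name make_single_int_specialization is invented for illustration.

#include <vulkan/vulkan.h>

// Sketch of what fill_map_entries() plus the vertex_data/fragment_data vectors
// amount to for one integer constant. The caller must keep 'entry' and 'value'
// alive until the pipeline is created, just like the vectors above.
VkSpecializationInfo make_single_int_specialization(const int& value, VkSpecializationMapEntry& entry) {
    entry.constantID = 0;           // GFXShaderConstant::index, i.e. layout(constant_id = 0) in GLSL
    entry.offset = 0;               // byte offset of this constant inside pData
    entry.size = sizeof(int);

    VkSpecializationInfo spec_info = {};
    spec_info.mapEntryCount = 1;
    spec_info.pMapEntries = &entry;
    spec_info.dataSize = sizeof(int);
    spec_info.pData = &value;
    return spec_info;
}

The vertex and fragment branches above do the same thing, except that the map entries and int values for all constants are packed into vectors, and the resulting VkSpecializationInfo is hung off pSpecializationInfo on the corresponding VkPipelineShaderStageCreateInfo.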
@@ -1373,6 +1425,7 @@ void GFXVulkan::submit(GFXCommandBuffer* command_buffer, const platform::window_
         {
             VkBuffer buffer = ((GFXVulkanBuffer*)command.data.set_vertex_buffer.buffer)->handle;
             VkDeviceSize offset = command.data.set_vertex_buffer.offset;
             vkCmdBindVertexBuffers(cmd, command.data.set_vertex_buffer.index, 1, &buffer, &offset);
         }
         break;
@@ -1381,7 +1434,7 @@ void GFXVulkan::submit(GFXCommandBuffer* command_buffer, const platform::window_
             VkIndexType indexType = VK_INDEX_TYPE_UINT32;
             if (command.data.set_index_buffer.index_type == IndexType::UINT16)
                 indexType = VK_INDEX_TYPE_UINT16;
             vkCmdBindIndexBuffer(cmd, ((GFXVulkanBuffer*)command.data.set_index_buffer.buffer)->handle, 0, indexType);
         }
         break;
@@ -1390,7 +1443,7 @@ void GFXVulkan::submit(GFXCommandBuffer* command_buffer, const platform::window_
             VkShaderStageFlags applicableStages = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
             if(is_compute)
                 applicableStages = VK_SHADER_STAGE_COMPUTE_BIT;
             if(currentPipeline != nullptr)
                 vkCmdPushConstants(cmd, currentPipeline->layout, applicableStages, 0, command.data.set_push_constant.size, command.data.set_push_constant.bytes.data());
         }
@@ -1401,7 +1454,7 @@ void GFXVulkan::submit(GFXCommandBuffer* command_buffer, const platform::window_
             bsb.buffer = command.data.bind_shader_buffer.buffer;
             bsb.offset = command.data.bind_shader_buffer.offset;
             bsb.size = command.data.bind_shader_buffer.size;
             boundShaderBuffers[command.data.bind_shader_buffer.index] = bsb;
         }
         break;
@@ -1425,8 +1478,9 @@ void GFXVulkan::submit(GFXCommandBuffer* command_buffer, const platform::window_
         break;
         case GFXCommandType::DrawIndexed:
         {
-            if(try_bind_descriptor())
+            if(try_bind_descriptor()) {
                 vkCmdDrawIndexed(cmd, command.data.draw_indexed.index_count, 1, command.data.draw_indexed.first_index, command.data.draw_indexed.vertex_offset, 0);
+            }
         }
         break;
         case GFXCommandType::SetDepthBias: