// Copyright 2018 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "VkPipeline.hpp"

#include "VkDestroy.hpp"
#include "VkDevice.hpp"
#include "VkPipelineCache.hpp"
#include "VkPipelineLayout.hpp"
#include "VkRenderPass.hpp"
#include "VkShaderModule.hpp"
#include "VkStringify.hpp"
#include "Pipeline/ComputeProgram.hpp"
#include "Pipeline/SpirvShader.hpp"

#include "marl/trace.h"

#include "spirv-tools/optimizer.hpp"

#include <iostream>

namespace {

// preprocessSpirv applies and freezes specializations into constants, and inlines all functions.
std::vector<uint32_t> preprocessSpirv(
    std::vector<uint32_t> const &code,
    VkSpecializationInfo const *specializationInfo,
    bool optimize)
{
    spvtools::Optimizer opt{ SPV_ENV_VULKAN_1_1 };

    opt.SetMessageConsumer([](spv_message_level_t level, const char *source, const spv_position_t &position, const char *message) {
        switch(level)
        {
            case SPV_MSG_FATAL: sw::warn("SPIR-V FATAL: %d:%d %s\n", int(position.line), int(position.column), message); break;
            case SPV_MSG_INTERNAL_ERROR: sw::warn("SPIR-V INTERNAL_ERROR: %d:%d %s\n", int(position.line), int(position.column), message); break;
            case SPV_MSG_ERROR: sw::warn("SPIR-V ERROR: %d:%d %s\n", int(position.line), int(position.column), message); break;
            case SPV_MSG_WARNING: sw::warn("SPIR-V WARNING: %d:%d %s\n", int(position.line), int(position.column), message); break;
            case SPV_MSG_INFO: sw::trace("SPIR-V INFO: %d:%d %s\n", int(position.line), int(position.column), message); break;
            case SPV_MSG_DEBUG: sw::trace("SPIR-V DEBUG: %d:%d %s\n", int(position.line), int(position.column), message); break;
            default: sw::trace("SPIR-V MESSAGE: %d:%d %s\n", int(position.line), int(position.column), message); break;
        }
    });

    // If the pipeline uses specialization, apply the specializations before freezing
    if(specializationInfo)
    {
        std::unordered_map<uint32_t, std::vector<uint32_t>> specializations;
        for(auto i = 0u; i < specializationInfo->mapEntryCount; ++i)
        {
            auto const &e = specializationInfo->pMapEntries[i];
            auto value_ptr =
                static_cast<uint32_t const *>(specializationInfo->pData) + e.offset / sizeof(uint32_t);
            specializations.emplace(e.constantID,
                                    std::vector<uint32_t>{ value_ptr, value_ptr + e.size / sizeof(uint32_t) });
        }
        opt.RegisterPass(spvtools::CreateSetSpecConstantDefaultValuePass(specializations));
    }

    if(optimize)
    {
        // Full optimization list taken from spirv-opt.
        opt.RegisterPerformancePasses();
    }

    std::vector<uint32_t> optimized;
    opt.Run(code.data(), code.size(), &optimized);

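    // Debug aid, disabled by default: flip the condition below to true to dump
    // the SPIR-V disassembly before and after optimization to stdout.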
    if(false)
    {
        spvtools::SpirvTools core(SPV_ENV_VULKAN_1_1);
        std::string preOpt;
        core.Disassemble(code, &preOpt, SPV_BINARY_TO_TEXT_OPTION_NONE);
        std::string postOpt;
        core.Disassemble(optimized, &postOpt, SPV_BINARY_TO_TEXT_OPTION_NONE);
        std::cout << "PRE-OPT: " << preOpt << std::endl
                  << "POST-OPT: " << postOpt << std::endl;
    }

    return optimized;
}

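// createShader builds a sw::SpirvShader for the given cache key. Unless a
// debugger context is attached, the SPIR-V is first specialized and optimized
// by preprocessSpirv().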
std::shared_ptr<sw::SpirvShader> createShader(
    const vk::PipelineCache::SpirvShaderKey &key,
    const vk::ShaderModule *module,
    bool robustBufferAccess,
    const std::shared_ptr<vk::dbg::Context> &dbgctx)
{
    // Do not optimize the shader if we have a debugger context.
    // Optimization passes are likely to damage debug information and reorder
    // instructions.
    const bool optimize = !dbgctx;

    // TODO(b/147726513): Do not preprocess the shader if we have a debugger
    // context.
    // This is a work-around for the SPIR-V tools incorrectly reporting errors
    // when debug information is provided. This can be removed once the
    // following SPIR-V tools bugs are fixed:
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3102
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3103
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3118
    auto code = dbgctx ? key.getInsns() : preprocessSpirv(key.getInsns(), key.getSpecializationInfo(), optimize);
    ASSERT(code.size() > 0);

    // If the pipeline has specialization constants, assume they're unique and
    // use a new serial ID so the shader gets recompiled.
    uint32_t codeSerialID = (key.getSpecializationInfo() ? vk::ShaderModule::nextSerialID() : module->getSerialID());

    // TODO(b/119409619): use allocator.
    return std::make_shared<sw::SpirvShader>(codeSerialID, key.getPipelineStage(), key.getEntryPointName().c_str(),
                                             code, key.getRenderPass(), key.getSubpassIndex(), robustBufferAccess, dbgctx);
}

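// createProgram builds and finalizes the sw::ComputeProgram for a compute shader.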
std::shared_ptr<sw::ComputeProgram> createProgram(const vk::PipelineCache::ComputeProgramKey &key)
{
    MARL_SCOPED_EVENT("createProgram");

    vk::DescriptorSet::Bindings descriptorSets;  // FIXME(b/129523279): Delay code generation until invoke time.
    // TODO(b/119409619): use allocator.
    auto program = std::make_shared<sw::ComputeProgram>(key.getShader(), key.getLayout(), descriptorSets);
    program->generate();
    program->finalize();
    return program;
}

}  // anonymous namespace

namespace vk {

Pipeline::Pipeline(PipelineLayout *layout, const Device *device)
    : layout(layout)
    , device(device)
    , robustBufferAccess(device->getEnabledFeatures().robustBufferAccess)
{
    layout->incRefCount();
}

void Pipeline::destroy(const VkAllocationCallbacks *pAllocator)
{
    destroyPipeline(pAllocator);

    vk::release(static_cast<VkPipelineLayout>(*layout), pAllocator);
}

GraphicsPipeline::GraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, void *mem, const Device *device)
    : Pipeline(vk::Cast(pCreateInfo->layout), device)
{
    context.robustBufferAccess = robustBufferAccess;

    if((pCreateInfo->flags &
        ~(VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT |
          VK_PIPELINE_CREATE_DERIVATIVE_BIT |
          VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) != 0)
    {
        UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
    }

    if(pCreateInfo->pDynamicState)
    {
        if(pCreateInfo->pDynamicState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pDynamicState->flags %d", int(pCreateInfo->pDynamicState->flags));
        }

        for(uint32_t i = 0; i < pCreateInfo->pDynamicState->dynamicStateCount; i++)
        {
            VkDynamicState dynamicState = pCreateInfo->pDynamicState->pDynamicStates[i];
            switch(dynamicState)
            {
                case VK_DYNAMIC_STATE_VIEWPORT:
                case VK_DYNAMIC_STATE_SCISSOR:
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
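                    // Track each requested dynamic state as one bit in dynamicStateFlags,
                    // indexed by the VkDynamicState enum value; hasDynamicState() tests that bit.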
                    ASSERT(dynamicState < (sizeof(dynamicStateFlags) * 8));
                    dynamicStateFlags |= (1 << dynamicState);
                    break;
                default:
                    UNSUPPORTED("VkDynamicState %d", int(dynamicState));
            }
        }
    }

    const VkPipelineVertexInputStateCreateInfo *vertexInputState = pCreateInfo->pVertexInputState;

    if(vertexInputState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("vertexInputState->flags");
    }

    // Context must always have a PipelineLayout set.
    context.pipelineLayout = layout;

    // Temporary in-binding-order representation of buffer strides, to be consumed below
    // when considering attributes. TODO: unfuse buffers from attributes in the backend; the current coupling is a holdover from the old GL model.
    uint32_t vertexStrides[MAX_VERTEX_INPUT_BINDINGS];
    uint32_t instanceStrides[MAX_VERTEX_INPUT_BINDINGS];
    for(uint32_t i = 0; i < vertexInputState->vertexBindingDescriptionCount; i++)
    {
        auto const &desc = vertexInputState->pVertexBindingDescriptions[i];
        vertexStrides[desc.binding] = desc.inputRate == VK_VERTEX_INPUT_RATE_VERTEX ? desc.stride : 0;
        instanceStrides[desc.binding] = desc.inputRate == VK_VERTEX_INPUT_RATE_INSTANCE ? desc.stride : 0;
    }

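    // Resolve each attribute against its binding: copy the format and offset, and pick up
    // the per-vertex or per-instance stride recorded above for that binding.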
    for(uint32_t i = 0; i < vertexInputState->vertexAttributeDescriptionCount; i++)
    {
        auto const &desc = vertexInputState->pVertexAttributeDescriptions[i];
        sw::Stream &input = context.input[desc.location];
        input.format = desc.format;
        input.offset = desc.offset;
        input.binding = desc.binding;
        input.vertexStride = vertexStrides[desc.binding];
        input.instanceStride = instanceStrides[desc.binding];
    }

    const VkPipelineInputAssemblyStateCreateInfo *inputAssemblyState = pCreateInfo->pInputAssemblyState;

    if(inputAssemblyState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("pCreateInfo->pInputAssemblyState->flags %d", int(pCreateInfo->pInputAssemblyState->flags));
    }

    primitiveRestartEnable = (inputAssemblyState->primitiveRestartEnable != VK_FALSE);
    context.topology = inputAssemblyState->topology;

    const VkPipelineRasterizationStateCreateInfo *rasterizationState = pCreateInfo->pRasterizationState;

    if(rasterizationState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("pCreateInfo->pRasterizationState->flags %d", int(pCreateInfo->pRasterizationState->flags));
    }

    if(rasterizationState->depthClampEnable != VK_FALSE)
    {
        UNSUPPORTED("VkPhysicalDeviceFeatures::depthClamp");
    }

    context.rasterizerDiscard = (rasterizationState->rasterizerDiscardEnable != VK_FALSE);
    context.cullMode = rasterizationState->cullMode;
    context.frontFace = rasterizationState->frontFace;
    context.polygonMode = rasterizationState->polygonMode;
    context.depthBias = (rasterizationState->depthBiasEnable != VK_FALSE) ? rasterizationState->depthBiasConstantFactor : 0.0f;
    context.slopeDepthBias = (rasterizationState->depthBiasEnable != VK_FALSE) ? rasterizationState->depthBiasSlopeFactor : 0.0f;
    context.lineWidth = rasterizationState->lineWidth;

    const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(rasterizationState->pNext);
    while(extensionCreateInfo)
    {
        // Casting to a long since some structures, such as
        // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT
        // are not enumerated in the official Vulkan header
        switch((long)(extensionCreateInfo->sType))
        {
            case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
            {
                const VkPipelineRasterizationLineStateCreateInfoEXT *lineStateCreateInfo = reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>(extensionCreateInfo);
                context.lineRasterizationMode = lineStateCreateInfo->lineRasterizationMode;
            }
            break;
            case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT:
            {
                const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *provokingVertexModeCreateInfo =
                    reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>(extensionCreateInfo);
                context.provokingVertexMode = provokingVertexModeCreateInfo->provokingVertexMode;
            }
            break;
            default:
                WARN("pCreateInfo->pRasterizationState->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
                break;
        }

        extensionCreateInfo = extensionCreateInfo->pNext;
    }

    // The sample count affects the batch size, so it needs initialization even if rasterization is disabled.
    // TODO(b/147812380): Eliminate the dependency between multisampling and batch size.
    context.sampleCount = 1;

    // Only access rasterization state if rasterization is not disabled.
    if(rasterizationState->rasterizerDiscardEnable == VK_FALSE)
    {
        const VkPipelineViewportStateCreateInfo *viewportState = pCreateInfo->pViewportState;
        const VkPipelineMultisampleStateCreateInfo *multisampleState = pCreateInfo->pMultisampleState;
        const VkPipelineDepthStencilStateCreateInfo *depthStencilState = pCreateInfo->pDepthStencilState;
        const VkPipelineColorBlendStateCreateInfo *colorBlendState = pCreateInfo->pColorBlendState;

        if(viewportState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pViewportState->flags %d", int(pCreateInfo->pViewportState->flags));
        }

        if((viewportState->viewportCount != 1) ||
           (viewportState->scissorCount != 1))
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::multiViewport");
        }

        if(!hasDynamicState(VK_DYNAMIC_STATE_SCISSOR))
        {
            scissor = viewportState->pScissors[0];
        }

        if(!hasDynamicState(VK_DYNAMIC_STATE_VIEWPORT))
        {
            viewport = viewportState->pViewports[0];
        }

        if(multisampleState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pMultisampleState->flags %d", int(pCreateInfo->pMultisampleState->flags));
        }

        if(multisampleState->sampleShadingEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::sampleRateShading");
        }

        if(multisampleState->alphaToOneEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::alphaToOne");
        }

        switch(multisampleState->rasterizationSamples)
        {
            case VK_SAMPLE_COUNT_1_BIT:
                context.sampleCount = 1;
                break;
            case VK_SAMPLE_COUNT_4_BIT:
                context.sampleCount = 4;
                break;
            default:
                UNSUPPORTED("Unsupported sample count");
        }

        if(multisampleState->pSampleMask)
        {
            context.sampleMask = multisampleState->pSampleMask[0];
        }
        else  // "If pSampleMask is NULL, it is treated as if the mask has all bits set to 1."
        {
            context.sampleMask = ~0;
        }

        context.alphaToCoverage = (multisampleState->alphaToCoverageEnable != VK_FALSE);
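        // Keep only the low context.sampleCount bits of the sample mask
        // (e.g. 0xF when sampleCount is 4).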
        context.multiSampleMask = context.sampleMask & ((unsigned)0xFFFFFFFF >> (32 - context.sampleCount));

        const vk::RenderPass *renderPass = vk::Cast(pCreateInfo->renderPass);
        const VkSubpassDescription &subpass = renderPass->getSubpass(pCreateInfo->subpass);

        // Ignore pDepthStencilState when "the subpass of the render pass the pipeline is created against does not use a depth/stencil attachment"
        if(subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
        {
            if(depthStencilState->flags != 0)
            {
                // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
                UNSUPPORTED("pCreateInfo->pDepthStencilState->flags %d", int(pCreateInfo->pDepthStencilState->flags));
            }

            if(depthStencilState->depthBoundsTestEnable != VK_FALSE)
            {
                UNSUPPORTED("VkPhysicalDeviceFeatures::depthBounds");
            }

            context.depthBoundsTestEnable = (depthStencilState->depthBoundsTestEnable != VK_FALSE);
            context.depthBufferEnable = (depthStencilState->depthTestEnable != VK_FALSE);
            context.depthWriteEnable = (depthStencilState->depthWriteEnable != VK_FALSE);
            context.depthCompareMode = depthStencilState->depthCompareOp;

            context.stencilEnable = (depthStencilState->stencilTestEnable != VK_FALSE);
            if(context.stencilEnable)
            {
                context.frontStencil = depthStencilState->front;
                context.backStencil = depthStencilState->back;
            }
        }

        bool colorAttachmentUsed = false;
        for(uint32_t i = 0; i < subpass.colorAttachmentCount; i++)
        {
            if(subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED)
            {
                colorAttachmentUsed = true;
                break;
            }
        }

        // Ignore pColorBlendState when "the subpass of the render pass the pipeline is created against does not use any color attachments"
        if(colorAttachmentUsed)
        {
            if(colorBlendState->flags != 0)
            {
                // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
                UNSUPPORTED("pCreateInfo->pColorBlendState->flags %d", int(pCreateInfo->pColorBlendState->flags));
            }

            if(colorBlendState->logicOpEnable != VK_FALSE)
            {
                UNSUPPORTED("VkPhysicalDeviceFeatures::logicOp");
            }

            if(!hasDynamicState(VK_DYNAMIC_STATE_BLEND_CONSTANTS))
            {
                blendConstants.x = colorBlendState->blendConstants[0];
                blendConstants.y = colorBlendState->blendConstants[1];
                blendConstants.z = colorBlendState->blendConstants[2];
                blendConstants.w = colorBlendState->blendConstants[3];
            }

            for(auto i = 0u; i < colorBlendState->attachmentCount; i++)
            {
                const VkPipelineColorBlendAttachmentState &attachment = colorBlendState->pAttachments[i];
                context.colorWriteMask[i] = attachment.colorWriteMask;

                context.setBlendState(i, { (attachment.blendEnable != VK_FALSE),
                                           attachment.srcColorBlendFactor, attachment.dstColorBlendFactor, attachment.colorBlendOp,
                                           attachment.srcAlphaBlendFactor, attachment.dstAlphaBlendFactor, attachment.alphaBlendOp });
            }
        }
    }
}

void GraphicsPipeline::destroyPipeline(const VkAllocationCallbacks *pAllocator)
{
    vertexShader.reset();
    fragmentShader.reset();
}

size_t GraphicsPipeline::ComputeRequiredAllocationSize(const VkGraphicsPipelineCreateInfo *pCreateInfo)
{
    return 0;
}

void GraphicsPipeline::setShader(const VkShaderStageFlagBits &stage, const std::shared_ptr<sw::SpirvShader> spirvShader)
{
    switch(stage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:
            ASSERT(vertexShader.get() == nullptr);
            vertexShader = spirvShader;
            context.vertexShader = vertexShader.get();
            break;

        case VK_SHADER_STAGE_FRAGMENT_BIT:
            ASSERT(fragmentShader.get() == nullptr);
            fragmentShader = spirvShader;
            context.pixelShader = fragmentShader.get();
            break;

        default:
            UNSUPPORTED("Unsupported stage");
            break;
    }
}

const std::shared_ptr<sw::SpirvShader> GraphicsPipeline::getShader(const VkShaderStageFlagBits &stage) const
{
    switch(stage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:
            return vertexShader;
        case VK_SHADER_STAGE_FRAGMENT_BIT:
            return fragmentShader;
        default:
            UNSUPPORTED("Unsupported stage");
            return fragmentShader;
    }
}

void GraphicsPipeline::compileShaders(const VkAllocationCallbacks *pAllocator, const VkGraphicsPipelineCreateInfo *pCreateInfo, PipelineCache *pPipelineCache)
{
    for(auto pStage = pCreateInfo->pStages; pStage != pCreateInfo->pStages + pCreateInfo->stageCount; pStage++)
    {
        if(pStage->flags != 0)
        {
            // Vulkan 1.2: "flags must be 0"
            UNSUPPORTED("pStage->flags %d", int(pStage->flags));
        }

        const ShaderModule *module = vk::Cast(pStage->module);
        const PipelineCache::SpirvShaderKey key(pStage->stage, pStage->pName, module->getCode(),
                                                vk::Cast(pCreateInfo->renderPass), pCreateInfo->subpass,
                                                pStage->pSpecializationInfo);
        auto pipelineStage = key.getPipelineStage();

        if(pPipelineCache)
        {
            auto shader = pPipelineCache->getOrCreateShader(key, [&] {
                return createShader(key, module, robustBufferAccess, device->getDebuggerContext());
            });
            setShader(pipelineStage, shader);
        }
        else
        {
            auto shader = createShader(key, module, robustBufferAccess, device->getDebuggerContext());
            setShader(pipelineStage, shader);
        }
    }
}

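// Returns the number of primitives that vertexCount vertices produce for the
// pipeline's primitive topology (e.g. a triangle strip of N vertices yields
// max(N, 2) - 2 triangles).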
uint32_t GraphicsPipeline::computePrimitiveCount(uint32_t vertexCount) const
{
    switch(context.topology)
    {
        case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
            return vertexCount;
        case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
            return vertexCount / 2;
        case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
            return std::max<uint32_t>(vertexCount, 1) - 1;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
            return vertexCount / 3;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
            return std::max<uint32_t>(vertexCount, 2) - 2;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
            return std::max<uint32_t>(vertexCount, 2) - 2;
        default:
            UNSUPPORTED("VkPrimitiveTopology %d", int(context.topology));
    }

    return 0;
}

const sw::Context &GraphicsPipeline::getContext() const
{
    return context;
}

const VkRect2D &GraphicsPipeline::getScissor() const
{
    return scissor;
}

const VkViewport &GraphicsPipeline::getViewport() const
{
    return viewport;
}

const sw::float4 &GraphicsPipeline::getBlendConstants() const
{
    return blendConstants;
}

bool GraphicsPipeline::hasDynamicState(VkDynamicState dynamicState) const
{
    return (dynamicStateFlags & (1 << dynamicState)) != 0;
}

ComputePipeline::ComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo, void *mem, const Device *device)
    : Pipeline(vk::Cast(pCreateInfo->layout), device)
{
}

void ComputePipeline::destroyPipeline(const VkAllocationCallbacks *pAllocator)
{
    shader.reset();
    program.reset();
}

size_t ComputePipeline::ComputeRequiredAllocationSize(const VkComputePipelineCreateInfo *pCreateInfo)
{
    return 0;
}

void ComputePipeline::compileShaders(const VkAllocationCallbacks *pAllocator, const VkComputePipelineCreateInfo *pCreateInfo, PipelineCache *pPipelineCache)
{
    auto &stage = pCreateInfo->stage;
    const ShaderModule *module = vk::Cast(stage.module);

    ASSERT(shader.get() == nullptr);
    ASSERT(program.get() == nullptr);

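    // Compute pipelines have no render pass or subpass, hence the nullptr and 0 in the key.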
    const PipelineCache::SpirvShaderKey shaderKey(
        stage.stage, stage.pName, module->getCode(), nullptr, 0, stage.pSpecializationInfo);
    if(pPipelineCache)
    {
        shader = pPipelineCache->getOrCreateShader(shaderKey, [&] {
            return createShader(shaderKey, module, robustBufferAccess, device->getDebuggerContext());
        });

        const PipelineCache::ComputeProgramKey programKey(shader.get(), layout);
        program = pPipelineCache->getOrCreateComputeProgram(programKey, [&] {
            return createProgram(programKey);
        });
    }
    else
    {
        shader = createShader(shaderKey, module, robustBufferAccess, device->getDebuggerContext());
        const PipelineCache::ComputeProgramKey programKey(shader.get(), layout);
        program = createProgram(programKey);
    }
}

void ComputePipeline::run(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ,
                          uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ,
                          vk::DescriptorSet::Array const &descriptorSetObjects,
                          vk::DescriptorSet::Bindings const &descriptorSets,
                          vk::DescriptorSet::DynamicOffsets const &descriptorDynamicOffsets,
                          sw::PushConstantStorage const &pushConstants)
{
    ASSERT_OR_RETURN(program != nullptr);
    program->run(
        descriptorSetObjects, descriptorSets, descriptorDynamicOffsets, pushConstants,
        baseGroupX, baseGroupY, baseGroupZ,
        groupCountX, groupCountY, groupCountZ);
}

}  // namespace vk