blob: 0fa081260d659968c85ca2c3e87c3f908f336e34 [file] [log] [blame]
Chris Forbesaab9d112015-04-02 13:22:31 +13001/*
2 * Vulkan
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24#include <string.h>
25#include <stdlib.h>
26#include <assert.h>
Chris Forbes67cc36f2015-04-13 12:14:52 +120027#include <map>
Chris Forbesaab9d112015-04-02 13:22:31 +130028#include <unordered_map>
Chris Forbesbb164b62015-04-08 10:19:16 +120029#include <map>
Chris Forbes4396ff52015-04-08 10:11:59 +120030#include <vector>
Chris Forbesaab9d112015-04-02 13:22:31 +130031#include "loader_platform.h"
32#include "vk_dispatch_table_helper.h"
33#include "vkLayer.h"
Chris Forbes1b466bd2015-04-15 06:59:41 +120034#include "layers_config.h"
35#include "layers_msg.h"
Chris Forbes3317b382015-05-04 14:04:24 +120036#include "vk_enum_string_helper.h"
Chris Forbes5c75afe2015-04-17 10:13:28 +120037#include "shader_checker.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130038// The following is #included again to catch certain OS-specific functions
39// being used:
40#include "loader_platform.h"
41
Chris Forbes32e3b462015-05-09 10:31:21 +120042#include "spirv/spirv.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130043
Chris Forbesaab9d112015-04-02 13:22:31 +130044
/* Per-object dispatch tables: maps a driver object (gpu baseObject or
 * created device) to the next layer's dispatch table. */
static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
/* One-time-init guard for initLayer() (fired from vkCreateDevice). */
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
Chris Forbes4396ff52015-04-08 10:11:59 +120047
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120048
49static void
50build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
51{
52 unsigned int const *code = (unsigned int const *)&words[0];
53 size_t size = words.size();
54
55 unsigned word = 5;
56 while (word < size) {
57 unsigned opcode = code[word] & 0x0ffffu;
58 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
59
60 switch (opcode) {
61 case spv::OpTypeVoid:
62 case spv::OpTypeBool:
63 case spv::OpTypeInt:
64 case spv::OpTypeFloat:
65 case spv::OpTypeVector:
66 case spv::OpTypeMatrix:
67 case spv::OpTypeSampler:
68 case spv::OpTypeFilter:
69 case spv::OpTypeArray:
70 case spv::OpTypeRuntimeArray:
71 case spv::OpTypeStruct:
72 case spv::OpTypeOpaque:
73 case spv::OpTypePointer:
74 case spv::OpTypeFunction:
75 case spv::OpTypeEvent:
76 case spv::OpTypeDeviceEvent:
77 case spv::OpTypeReserveId:
78 case spv::OpTypeQueue:
79 case spv::OpTypePipe:
80 type_def_index[code[word+1]] = word;
81 break;
82
83 default:
84 /* We only care about type definitions */
85 break;
86 }
87
88 word += oplen;
89 }
90}
91
/* Everything the layer retains about a created shader: the raw SPIR-V and a
 * precomputed index of its type definitions. Instances are created in
 * vkCreateShader and looked up at pipeline-creation time. */
struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;

    /* Copies pCode (codeSize bytes, truncated to whole 32-bit words) out of
     * the create info so the image outlives the caller's buffer. */
    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)) {

        build_type_def_index(words, type_def_index);
    }
};
106
107
/* Maps a VkShader handle to its retained shader_source, captured at
 * vkCreateShader time and consumed during pipeline validation. Entries are
 * never freed here; presumably shaders outlive the pipelines using them --
 * TODO confirm. */
static std::unordered_map<void *, shader_source *> shader_map;
109
110
Chris Forbes1b466bd2015-04-15 06:59:41 +1200111static void
112initLayer()
113{
114 const char *strOpt;
115 // initialize ShaderChecker options
116 getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
117 g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);
118
119 if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
120 {
121 strOpt = getLayerOption("ShaderCheckerLogFilename");
122 if (strOpt)
123 {
124 g_logFile = fopen(strOpt, "w");
125 }
126 if (g_logFile == NULL)
127 g_logFile = stdout;
128 }
129}
130
131
Chris Forbesaab9d112015-04-02 13:22:31 +1300132static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *gpuw)
133{
134 VkLayerDispatchTable *pTable;
135
136 assert(gpuw);
137 std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) gpuw->baseObject);
138 if (it == tableMap.end())
139 {
140 pTable = new VkLayerDispatchTable;
141 tableMap[(void *) gpuw->baseObject] = pTable;
142 } else
143 {
144 return it->second;
145 }
146
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800147 layer_initialize_dispatch_table(pTable, gpuw->pGPA, (VkPhysicalDevice) gpuw->nextObject);
Chris Forbesaab9d112015-04-02 13:22:31 +1300148
149 return pTable;
150}
151
152
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800153VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
Chris Forbesaab9d112015-04-02 13:22:31 +1300154{
155 VkLayerDispatchTable* pTable = tableMap[gpu];
156 VkResult result = pTable->CreateDevice(gpu, pCreateInfo, pDevice);
Chris Forbes1b466bd2015-04-15 06:59:41 +1200157
158 loader_platform_thread_once(&g_initOnce, initLayer);
Chris Forbesaab9d112015-04-02 13:22:31 +1300159 // create a mapping for the device object into the dispatch table
160 tableMap.emplace(*pDevice, pTable);
161 return result;
162}
163
164
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600165VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
Chris Forbesaab9d112015-04-02 13:22:31 +1300166{
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600167 if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
Chris Forbesaab9d112015-04-02 13:22:31 +1300168 return VK_ERROR_INVALID_POINTER;
169
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600170 if (*pLayerCount < 1)
Chris Forbesaab9d112015-04-02 13:22:31 +1300171 return VK_ERROR_INITIALIZATION_FAILED;
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600172 *pLayerCount = 1;
Chris Forbesaab9d112015-04-02 13:22:31 +1300173 strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
174 return VK_SUCCESS;
175}
176
177
/* Descriptor for one extension advertised by this layer via
 * vkGetGlobalExtensionInfo. */
struct extProps {
    uint32_t version;
    const char * const name;
};
#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
/* Table of extensions this layer reports. */
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    0x10, "ShaderChecker",
    0x10, "Validation",
};
188
Chris Forbesaab9d112015-04-02 13:22:31 +1300189VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
190 VkExtensionInfoType infoType,
191 uint32_t extensionIndex,
192 size_t* pDataSize,
193 void* pData)
194{
Chris Forbesaab9d112015-04-02 13:22:31 +1300195 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
196 VkExtensionProperties *ext_props;
197 uint32_t *count;
198
199 if (pDataSize == NULL)
200 return VK_ERROR_INVALID_POINTER;
201
202 switch (infoType) {
203 case VK_EXTENSION_INFO_TYPE_COUNT:
204 *pDataSize = sizeof(uint32_t);
205 if (pData == NULL)
206 return VK_SUCCESS;
207 count = (uint32_t *) pData;
208 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
209 break;
210 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
211 *pDataSize = sizeof(VkExtensionProperties);
212 if (pData == NULL)
213 return VK_SUCCESS;
214 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
215 return VK_ERROR_INVALID_VALUE;
216 ext_props = (VkExtensionProperties *) pData;
217 ext_props->version = shaderCheckerExts[extensionIndex].version;
218 strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
219 VK_MAX_EXTENSION_NAME);
220 ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
221 break;
222 default:
223 return VK_ERROR_INVALID_VALUE;
224 };
225
226 return VK_SUCCESS;
227}
228
229
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200230static char const *
231storage_class_name(unsigned sc)
232{
233 switch (sc) {
Cody Northrop812b4612015-04-20 14:09:40 -0600234 case spv::StorageClassInput: return "input";
235 case spv::StorageClassOutput: return "output";
236 case spv::StorageClassUniformConstant: return "const uniform";
237 case spv::StorageClassUniform: return "uniform";
238 case spv::StorageClassWorkgroupLocal: return "workgroup local";
239 case spv::StorageClassWorkgroupGlobal: return "workgroup global";
240 case spv::StorageClassPrivateGlobal: return "private global";
241 case spv::StorageClassFunction: return "function";
242 case spv::StorageClassGeneric: return "generic";
243 case spv::StorageClassPrivate: return "private";
244 case spv::StorageClassAtomicCounter: return "atomic counter";
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200245 default: return "unknown";
246 }
247}
248
249
/* Appends a human-readable description of type <id> `type` from shader
 * `src` to the buffer at `dst`; recurses through compound types.
 * NOTE(review): callers pass fixed 1024-byte buffers with no bound passed
 * in here -- deeply nested types could overflow; confirm inputs are bounded.
 */
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        /* no OpType* definition recorded for this <id> */
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            /* word 3: signedness flag; word 2: bit width */
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                /* member type <id>s occupy words 2..oplen-1 */
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}
295
296
/* Recursively compares type <id> a_type (from shader a) against b_type
 * (from shader b), walking both type trees in lockstep. Returns true when
 * the two describe the same type for interface-matching purposes; pointer
 * storage classes are deliberately ignored. Missing definitions or opcode
 * mismatches are reported to stdout and count as "no match". */
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        printf("ERR: can't find def for type %d in producing shader %p; SPIRV probably invalid.\n",
               a_type, a);
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        printf("ERR: can't find def for type %d in consuming shader %p; SPIRV probably invalid.\n",
               b_type, b);
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    /* different kinds of type can never match */
    if (a_opcode != b_opcode) {
        printf(" - FAIL: type def opcodes differ: %d vs %d\n", a_opcode, b_opcode);
        return false;
    }

    switch (a_opcode) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3];
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2];
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout */
            return types_match(a, b, a_code[2], b_code[2]) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false;   /* structs cannot match if member counts differ */
                }

                /* member type <id>s occupy words 2..len-1 */
                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i])) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3]);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;

    }
}
371
372
/* Looks up `id` in `map`; returns the mapped value, or `def` when absent. */
static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto entry = map.find(id);
    return (entry == map.end()) ? def : entry->second;
}
382
383
/* One shader interface variable, as collected by
 * collect_interface_by_location. */
struct interface_var {
    uint32_t id;        /* result <id> of the OpVariable */
    uint32_t type_id;   /* <id> of the variable's declared type */
    /* TODO: collect the name, too? Isn't required to be present. */
};
389
390
/* Collects all interface variables of storage class `sinterface` from the
 * shader, keyed two ways:
 *   - out:          user-defined variables, keyed by Location decoration
 *   - builtins_out: builtin variables, keyed by BuiltIn decoration
 * Variables with neither decoration are reported as inconsistent SPIR-V.
 * Non-SPIR-V input (bad magic number) is reported and skipped. */
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    if (code[0] != spv::MagicNumber) {
        layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                   "Shader is not SPIR-V, unable to extract interface");
        return;
    }

    /* decoration targets seen so far: <id> -> Location / BuiltIn value.
     * Relies on decorations preceding the OpVariables they decorate. */
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    /* word 5 is the first instruction past the SPIR-V module header */
    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        /* OpVariable operands: word+1 type <id>, word+2 result <id>,
         * word+3 storage class */
        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                       code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}
464
465
Chris Forbesaab9d112015-04-02 13:22:31 +1300466VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
467 VkShader *pShader)
468{
469 VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
470 VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);
Chris Forbes4396ff52015-04-08 10:11:59 +1200471
472 shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
Chris Forbesaab9d112015-04-02 13:22:31 +1300473 return res;
474}
475
476
Chris Forbesbb164b62015-04-08 10:19:16 +1200477static void
478validate_interface_between_stages(shader_source const *producer, char const *producer_name,
479 shader_source const *consumer, char const *consumer_name)
480{
481 std::map<uint32_t, interface_var> outputs;
482 std::map<uint32_t, interface_var> inputs;
483
484 std::map<uint32_t, interface_var> builtin_outputs;
485 std::map<uint32_t, interface_var> builtin_inputs;
486
Chris Forbes5c75afe2015-04-17 10:13:28 +1200487 char str[1024];
Chris Forbesbb164b62015-04-08 10:19:16 +1200488
Cody Northrop812b4612015-04-20 14:09:40 -0600489 collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
490 collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);
Chris Forbesbb164b62015-04-08 10:19:16 +1200491
492 auto a_it = outputs.begin();
493 auto b_it = inputs.begin();
494
495 /* maps sorted by key (location); walk them together to find mismatches */
David Pinedof5997ab2015-04-27 16:36:17 -0600496 while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
497 bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
498 bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
499 auto a_first = (outputs.size() > 0 ? a_it->first : 0);
500 auto b_first = (inputs.size() > 0 ? b_it->first : 0);
501
502 if (b_at_end || a_first < b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200503 sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600504 producer_name, a_first, consumer_name);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200505 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesbb164b62015-04-08 10:19:16 +1200506 a_it++;
507 }
David Pinedof5997ab2015-04-27 16:36:17 -0600508 else if (a_at_end || a_first > b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200509 sprintf(str, "%s consumes input location %d which is not written by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600510 consumer_name, b_first, producer_name);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200511 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesbb164b62015-04-08 10:19:16 +1200512 b_it++;
513 }
514 else {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200515 if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id)) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200516 /* OK! */
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200517 }
518 else {
519 char producer_type[1024];
520 char consumer_type[1024];
521 describe_type(producer_type, producer, a_it->second.type_id);
522 describe_type(consumer_type, consumer, b_it->second.type_id);
523
Chris Forbes5c75afe2015-04-17 10:13:28 +1200524 sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200525 producer_type, consumer_type);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200526 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200527 }
Chris Forbesbb164b62015-04-08 10:19:16 +1200528 a_it++;
529 b_it++;
530 }
531 }
Chris Forbesbb164b62015-04-08 10:19:16 +1200532}
533
534
/* Coarse classification shared by VkFormats and SPIR-V interface types:
 * for matching purposes everything is float-like, signed int, or unsigned
 * int (or undefined when unclassifiable). */
enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};
541
542
/* Maps a VkFormat onto the coarse FORMAT_TYPE_* classification above.
 * Only the explicit SINT/UINT formats (and UNDEFINED) are special-cased;
 * every other format is treated as float-like in the shader. */
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}
586
587
Chris Forbes28c50882015-05-04 14:04:06 +1200588/* characterizes a SPIR-V type appearing in an interface to a FF stage,
589 * for comparison to a VkFormat's characterization above. */
590static unsigned
591get_fundamental_type(shader_source const *src, unsigned type)
592{
593 auto type_def_it = src->type_def_index.find(type);
594
595 if (type_def_it == src->type_def_index.end()) {
596 return FORMAT_TYPE_UNDEFINED;
597 }
598
599 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
600 unsigned opcode = code[0] & 0x0ffffu;
601 switch (opcode) {
602 case spv::OpTypeInt:
603 return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
604 case spv::OpTypeFloat:
605 return FORMAT_TYPE_FLOAT;
606 case spv::OpTypeVector:
607 return get_fundamental_type(src, code[2]);
608 case spv::OpTypeMatrix:
609 return get_fundamental_type(src, code[2]);
610 case spv::OpTypeArray:
611 return get_fundamental_type(src, code[2]);
612 case spv::OpTypePointer:
613 return get_fundamental_type(src, code[3]);
614 default:
615 return FORMAT_TYPE_UNDEFINED;
616 }
617}
618
619
/* Validates the pipeline's vertex input state against the VS's inputs:
 * warns for attributes the VS never reads, errors for VS inputs with no
 * attribute feeding them, and errors when the fundamental types disagree. */
static void
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    for (unsigned i = 0; i < vi->attributeCount; i++)
        attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    /* both maps are sorted by location; walk them in lockstep */
    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = (attribs.size() > 0 ? it_a->first : 0);
        auto b_first = (inputs.size() > 0 ? it_b->first : 0);
        if (b_at_end || a_first < b_first) {
            /* unused attribute: warning only */
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            /* VS reads an input nothing provides: error */
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }
}
674
675
/* Validates the FS outputs against the color blend state: handles the
 * legacy gl_FragColor broadcast case specially, then walks outputs and
 * attachments in lockstep checking coverage and fundamental types. */
static void
validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachment should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        /* NOTE(review): broadcast_err is written but never read before the
         * early return below -- looks vestigial; confirm before removing. */
        bool broadcast_err = false;
        if (outputs.size()) {
            /* broadcast plus user-defined outputs is contradictory */
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            broadcast_err = true;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                broadcast_err = true;
            }
        }

        /* broadcast model checked; the location-based walk below doesn't apply */
        return;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || ( it != outputs.end() && it->first < attachment)) {
            /* output with no attachment behind it: warning */
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            /* attachment never written: error */
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
            }

            /* OK! */
            it++;
            attachment++;
        }
    }
}
747
748
Chris Forbes60540932015-04-08 10:15:35 +1200749VK_LAYER_EXPORT VkResult VKAPI vkCreateGraphicsPipeline(VkDevice device,
750 const VkGraphicsPipelineCreateInfo *pCreateInfo,
751 VkPipeline *pPipeline)
752{
Chris Forbes5c75afe2015-04-17 10:13:28 +1200753 /* TODO: run cross-stage validation for GS, TCS, TES stages */
Chris Forbes60540932015-04-08 10:15:35 +1200754
Chris Forbes8f600932015-04-08 10:16:45 +1200755 /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
756 * before trying to do anything more: */
757
758 shader_source const *vs_source = 0;
759 shader_source const *fs_source = 0;
760 VkPipelineCbStateCreateInfo const *cb = 0;
761 VkPipelineVertexInputCreateInfo const *vi = 0;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200762 char str[1024];
Chris Forbes8f600932015-04-08 10:16:45 +1200763
764 for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
765 if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
766 auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;
767
Chris Forbes5c75afe2015-04-17 10:13:28 +1200768 if (shader_stage->shader.stage == VK_SHADER_STAGE_VERTEX) {
Chris Forbes8f600932015-04-08 10:16:45 +1200769 vs_source = shader_map[(void *)(shader_stage->shader.shader)];
Chris Forbes5c75afe2015-04-17 10:13:28 +1200770 }
771 else if (shader_stage->shader.stage == VK_SHADER_STAGE_FRAGMENT) {
Chris Forbes8f600932015-04-08 10:16:45 +1200772 fs_source = shader_map[(void *)(shader_stage->shader.shader)];
Chris Forbes5c75afe2015-04-17 10:13:28 +1200773 }
774 else {
775 sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
776 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
777 }
Chris Forbes8f600932015-04-08 10:16:45 +1200778 }
779 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
780 cb = (VkPipelineCbStateCreateInfo const *)stage;
781 }
782 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
783 vi = (VkPipelineVertexInputCreateInfo const *)stage;
784 }
785 }
786
Chris Forbes5c75afe2015-04-17 10:13:28 +1200787 sprintf(str, "Pipeline: vi=%p vs=%p fs=%p cb=%p\n", vi, vs_source, fs_source, cb);
788 layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NONE, "SC", str);
Chris Forbes8f600932015-04-08 10:16:45 +1200789
Chris Forbesfcd05f12015-04-08 10:36:37 +1200790 if (vi && vs_source) {
791 validate_vi_against_vs_inputs(vi, vs_source);
792 }
793
Chris Forbesbb164b62015-04-08 10:19:16 +1200794 if (vs_source && fs_source) {
795 validate_interface_between_stages(vs_source, "vertex shader",
796 fs_source, "fragment shader");
797 }
798
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200799 if (fs_source && cb) {
800 validate_fs_outputs_against_cb(fs_source, cb);
801 }
802
Chris Forbes60540932015-04-08 10:15:35 +1200803 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
804 VkResult res = pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
805 return res;
806}
807
808
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800809VK_LAYER_EXPORT void * VKAPI vkGetProcAddr(VkPhysicalDevice gpu, const char* pName)
Chris Forbesaab9d112015-04-02 13:22:31 +1300810{
811 if (gpu == NULL)
812 return NULL;
813
814 initLayerTable((const VkBaseLayerObject *) gpu);
815
Chris Forbes1b466bd2015-04-15 06:59:41 +1200816 loader_platform_thread_once(&g_initOnce, initLayer);
817
Chris Forbesaab9d112015-04-02 13:22:31 +1300818#define ADD_HOOK(fn) \
819 if (!strncmp(#fn, pName, sizeof(#fn))) \
820 return (void *) fn
821
822 ADD_HOOK(vkGetProcAddr);
823 ADD_HOOK(vkEnumerateLayers);
824 ADD_HOOK(vkCreateDevice);
825 ADD_HOOK(vkCreateShader);
Chris Forbes60540932015-04-08 10:15:35 +1200826 ADD_HOOK(vkCreateGraphicsPipeline);
Chris Forbesaab9d112015-04-02 13:22:31 +1300827
828 VkBaseLayerObject* gpuw = (VkBaseLayerObject *) gpu;
829 if (gpuw->pGPA == NULL)
830 return NULL;
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800831 return gpuw->pGPA((VkPhysicalDevice) gpuw->nextObject, pName);
Chris Forbesaab9d112015-04-02 13:22:31 +1300832}