/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include "loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vkLayer.h"
#include "layers_config.h"
#include "layers_msg.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "loader_platform.h"

#include "spirv/spirv.h"


static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);


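/* SPIR-V modules begin with a five-word header (magic number, version,
 * generator, bound, schema), so instructions start at word 5. Each instruction
 * packs its opcode into the low 16 bits of its first word and its total word
 * count into the high 16 bits; the decoding loops below rely on that layout.
 */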
static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
        case spv::OpTypeVoid:
        case spv::OpTypeBool:
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeSampler:
        case spv::OpTypeFilter:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeStruct:
        case spv::OpTypeOpaque:
        case spv::OpTypePointer:
        case spv::OpTypeFunction:
        case spv::OpTypeEvent:
        case spv::OpTypeDeviceEvent:
        case spv::OpTypeReserveId:
        case spv::OpTypeQueue:
        case spv::OpTypePipe:
            type_def_index[code[word+1]] = word;
            break;

        default:
            /* We only care about type definitions */
            break;
        }

        word += oplen;
    }
}

struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;

    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)) {

        build_type_def_index(words, type_def_index);
    }
};


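/* Maps the VkShader handle returned from the driver to the shader_source we
 * captured for it in vkCreateShader, so later pipeline validation can get at
 * the SPIR-V words.
 */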
static std::unordered_map<void *, shader_source *> shader_map;


static void
initLayer()
{
    const char *strOpt;
    // initialize ShaderChecker options
    getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
    g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);

    if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        strOpt = getLayerOption("ShaderCheckerLogFilename");
        if (strOpt)
        {
            g_logFile = fopen(strOpt, "w");
        }
        if (g_logFile == NULL)
            g_logFile = stdout;
    }
}


static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *gpuw)
{
    VkLayerDispatchTable *pTable;

    assert(gpuw);
    std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) gpuw->baseObject);
    if (it == tableMap.end())
    {
        pTable = new VkLayerDispatchTable;
        tableMap[(void *) gpuw->baseObject] = pTable;
    } else
    {
        return it->second;
    }

    layer_initialize_dispatch_table(pTable, gpuw->pGPA, (VkPhysicalDevice) gpuw->nextObject);

    return pTable;
}


VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
{
    VkLayerDispatchTable* pTable = tableMap[gpu];
    VkResult result = pTable->CreateDevice(gpu, pCreateInfo, pDevice);

    loader_platform_thread_once(&g_initOnce, initLayer);
    // create a mapping for the device object into the dispatch table
    tableMap.emplace(*pDevice, pTable);
    return result;
}


VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
{
    if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
        return VK_ERROR_INVALID_POINTER;

    if (*pLayerCount < 1)
        return VK_ERROR_INITIALIZATION_FAILED;
    *pLayerCount = 1;
    strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
    return VK_SUCCESS;
}


struct extProps {
    uint32_t version;
    const char * const name;
};
#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    0x10, "ShaderChecker",
    0x10, "Validation",
};

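/* vkGetGlobalExtensionInfo follows the usual two-step query: callers first ask
 * for VK_EXTENSION_INFO_TYPE_COUNT, then fetch each entry by index with
 * VK_EXTENSION_INFO_TYPE_PROPERTIES.
 */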
VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
        VkExtensionInfoType infoType,
        uint32_t extensionIndex,
        size_t* pDataSize,
        void* pData)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */
    VkExtensionProperties *ext_props;
    uint32_t *count;

    if (pDataSize == NULL)
        return VK_ERROR_INVALID_POINTER;

    switch (infoType) {
    case VK_EXTENSION_INFO_TYPE_COUNT:
        *pDataSize = sizeof(uint32_t);
        if (pData == NULL)
            return VK_SUCCESS;
        count = (uint32_t *) pData;
        *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
        break;
    case VK_EXTENSION_INFO_TYPE_PROPERTIES:
        *pDataSize = sizeof(VkExtensionProperties);
        if (pData == NULL)
            return VK_SUCCESS;
        if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
            return VK_ERROR_INVALID_VALUE;
        ext_props = (VkExtensionProperties *) pData;
        ext_props->version = shaderCheckerExts[extensionIndex].version;
        strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
                VK_MAX_EXTENSION_NAME);
        ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
        break;
    default:
        return VK_ERROR_INVALID_VALUE;
    };

    return VK_SUCCESS;
}


static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
    case spv::StorageClassInput: return "input";
    case spv::StorageClassOutput: return "output";
    case spv::StorageClassUniformConstant: return "const uniform";
    case spv::StorageClassUniform: return "uniform";
    case spv::StorageClassWorkgroupLocal: return "workgroup local";
    case spv::StorageClassWorkgroupGlobal: return "workgroup global";
    case spv::StorageClassPrivateGlobal: return "private global";
    case spv::StorageClassFunction: return "function";
    case spv::StorageClassGeneric: return "generic";
    case spv::StorageClassPrivate: return "private";
    case spv::StorageClassAtomicCounter: return "atomic counter";
    default: return "unknown";
    }
}


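/* describe_type() renders a human-readable summary of a type tree by recursing
 * through the type_def_index; for example, a pointer in the Input storage class
 * to a 4-component vector of 32-bit floats would come out as
 * "ptr to input vec4 of float32".
 */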
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
    case spv::OpTypeBool:
        return dst + sprintf(dst, "bool");
    case spv::OpTypeInt:
        return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
    case spv::OpTypeFloat:
        return dst + sprintf(dst, "float%d", code[2]);
    case spv::OpTypeVector:
        dst += sprintf(dst, "vec%d of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypeMatrix:
        dst += sprintf(dst, "mat%d of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypeArray:
        dst += sprintf(dst, "arr[%d] of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypePointer:
        dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
        return describe_type(dst, src, code[3]);
    case spv::OpTypeStruct:
        {
            unsigned oplen = code[0] >> 16;
            dst += sprintf(dst, "struct of (");
            for (unsigned i = 2; i < oplen; i++) {
                dst = describe_type(dst, src, code[i]);
                dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
            }
            return dst;
        }
    default:
        return dst + sprintf(dst, "oddtype");
    }
}


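/* Type <id>s are local to each SPIR-V module, so matching between a producer
 * and a consumer has to compare the two type trees structurally rather than by
 * id; types_match() walks both trees in lockstep.
 */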
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        printf("ERR: can't find def for type %d in producing shader %p; SPIRV probably invalid.\n",
               a_type, a);
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        printf("ERR: can't find def for type %d in consuming shader %p; SPIRV probably invalid.\n",
               b_type, b);
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (a_opcode != b_opcode) {
        printf(" - FAIL: type def opcodes differ: %d vs %d\n", a_opcode, b_opcode);
        return false;
    }

    switch (a_opcode) {
    case spv::OpTypeBool:
        return true;
    case spv::OpTypeInt:
        /* match on width, signedness */
        return a_code[2] == b_code[2] && a_code[3] == b_code[3];
    case spv::OpTypeFloat:
        /* match on width */
        return a_code[2] == b_code[2];
    case spv::OpTypeVector:
    case spv::OpTypeMatrix:
    case spv::OpTypeArray:
        /* match on element type, count. these all have the same layout */
        return types_match(a, b, a_code[2], b_code[2]) && a_code[3] == b_code[3];
    case spv::OpTypeStruct:
        /* match on all element types */
        {
            unsigned a_len = a_code[0] >> 16;
            unsigned b_len = b_code[0] >> 16;

            if (a_len != b_len) {
                return false; /* structs cannot match if member counts differ */
            }

            for (unsigned i = 2; i < a_len; i++) {
                if (!types_match(a, b, a_code[i], b_code[i])) {
                    return false;
                }
            }

            return true;
        }
    case spv::OpTypePointer:
        /* match on pointee type. storage class is expected to differ */
        return types_match(a, b, a_code[3], b_code[3]);

    default:
        /* remaining types are CLisms, or may not appear in the interfaces we
         * are interested in. Just claim no match.
         */
        return false;
    }
}


static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};


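/* collect_interface_by_location() relies on the operand layout of the two
 * instructions it cares about: OpDecorate is <target-id, decoration, literal>
 * starting at word+1, and OpVariable is <result-type-id, result-id,
 * storage-class> starting at word+1.
 */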
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    if (code[0] != spv::MagicNumber) {
        layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                   "Shader is not SPIR-V, unable to extract interface");
        return;
    }

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}


VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
                                              VkShader *pShader)
{
    VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
    VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);

    shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
    return res;
}


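/* Because both interfaces are collected into std::map keyed by location, the
 * walk below sees locations in ascending order on both sides; unmatched
 * producer outputs are reported as warnings, while unmatched consumer inputs
 * and type mismatches are reported as errors.
 */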
static void
validate_interface_between_stages(shader_source const *producer, char const *producer_name,
                                  shader_source const *consumer, char const *consumer_name)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() > 0 && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                    producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                    consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                        producer_type, consumer_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
            }
            a_it++;
            b_it++;
        }
    }
}


enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};


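/* get_format_type() only distinguishes the integer formats; anything that is
 * not explicitly SINT or UINT (and not UNDEFINED) is treated as float-like,
 * which is what the FS-output broadcast check below relies on.
 */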
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}


static void
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    for (unsigned i = 0; i < vi->attributeCount; i++)
        attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            it_b++;
        }
        else {
            /* TODO: type check */
            /* OK! */
            it_a++;
            it_b++;
        }
    }
}


static void
validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachments should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        bool broadcast_err = false;
        if (outputs.size()) {
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            broadcast_err = true;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                broadcast_err = true;
            }
        }

        return;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while (outputs.size() > 0 && (it != outputs.end() || attachment < cb->attachmentCount)) {
        if (it != outputs.end() && (attachment == cb->attachmentCount || it->first < attachment)) {
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
        }
        else {
            /* OK! */
            /* TODO: typecheck */
            it++;
            attachment++;
        }
    }
}


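/* vkCreateGraphicsPipeline walks the create-info chain via pNext, picks out the
 * shader stages, vertex input state, and color blend state wherever they appear,
 * and then runs the VI vs VS, VS vs FS, and FS vs CB interface checks before
 * forwarding the call down the layer chain.
 */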
VK_LAYER_EXPORT VkResult VKAPI vkCreateGraphicsPipeline(VkDevice device,
                                                        const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                                        VkPipeline *pPipeline)
{
    /* TODO: run cross-stage validation for GS, TCS, TES stages */

    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_source const *vs_source = 0;
    shader_source const *fs_source = 0;
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputCreateInfo const *vi = 0;
    char str[1024];

    for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
        if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
            auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;

            if (shader_stage->shader.stage == VK_SHADER_STAGE_VERTEX) {
                vs_source = shader_map[(void *)(shader_stage->shader.shader)];
            }
            else if (shader_stage->shader.stage == VK_SHADER_STAGE_FRAGMENT) {
                fs_source = shader_map[(void *)(shader_stage->shader.shader)];
            }
            else {
                sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
                layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
            }
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
            cb = (VkPipelineCbStateCreateInfo const *)stage;
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
            vi = (VkPipelineVertexInputCreateInfo const *)stage;
        }
    }

    sprintf(str, "Pipeline: vi=%p vs=%p fs=%p cb=%p\n", vi, vs_source, fs_source, cb);
    layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NONE, "SC", str);

    if (vi && vs_source) {
        validate_vi_against_vs_inputs(vi, vs_source);
    }

    if (vs_source && fs_source) {
        validate_interface_between_stages(vs_source, "vertex shader",
                                          fs_source, "fragment shader");
    }

    if (fs_source && cb) {
        validate_fs_outputs_against_cb(fs_source, cb);
    }

    VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
    VkResult res = pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
    return res;
}


VK_LAYER_EXPORT void * VKAPI vkGetProcAddr(VkPhysicalDevice gpu, const char* pName)
{
    if (gpu == NULL)
        return NULL;

    initLayerTable((const VkBaseLayerObject *) gpu);

    loader_platform_thread_once(&g_initOnce, initLayer);

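/* ADD_HOOK returns this layer's implementation when pName matches one of the
 * entrypoints we intercept; anything else falls through to the next layer's
 * GetProcAddr at the bottom of the function.
 */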
#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkGetProcAddr);
    ADD_HOOK(vkEnumerateLayers);
    ADD_HOOK(vkCreateDevice);
    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkCreateGraphicsPipeline);

    VkBaseLayerObject* gpuw = (VkBaseLayerObject *) gpu;
    if (gpuw->pGPA == NULL)
        return NULL;
    return gpuw->pGPA((VkPhysicalDevice) gpuw->nextObject, pName);
}