/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include "loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vkLayer.h"
#include "layers_config.h"
#include "layers_msg.h"
#include "vk_enum_string_helper.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "loader_platform.h"

#include "spirv/spirv.h"

static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
static VkBaseLayerObject *pCurObj;
static std::unordered_map<void *, VkLayerInstanceDispatchTable *> tableInstanceMap;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;

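/* Scan the SPIR-V word stream (past the 5-word header) and record, for every
 * OpType* instruction, the word offset of its definition keyed by result <id>.
 * This index lets later passes jump straight to a type definition instead of
 * rescanning the whole module. */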
static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
            case spv::OpTypeVoid:
            case spv::OpTypeBool:
            case spv::OpTypeInt:
            case spv::OpTypeFloat:
            case spv::OpTypeVector:
            case spv::OpTypeMatrix:
            case spv::OpTypeSampler:
            case spv::OpTypeFilter:
            case spv::OpTypeArray:
            case spv::OpTypeRuntimeArray:
            case spv::OpTypeStruct:
            case spv::OpTypeOpaque:
            case spv::OpTypePointer:
            case spv::OpTypeFunction:
            case spv::OpTypeEvent:
            case spv::OpTypeDeviceEvent:
            case spv::OpTypeReserveId:
            case spv::OpTypeQueue:
            case spv::OpTypePipe:
                type_def_index[code[word+1]] = word;
                break;

            default:
                /* We only care about type definitions */
                break;
        }

        word += oplen;
    }
}

struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;
    bool is_spirv;

    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
        type_def_index(),
        is_spirv(true) {

        if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                       "Shader is not SPIR-V, most checks will not be possible");
            is_spirv = false;
            return;
        }

        build_type_def_index(words, type_def_index);
    }
};


static std::unordered_map<void *, shader_source *> shader_map;

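// Read this layer's settings (report level, debug action, optional log file)
// from the layer configuration on first use.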
static void
initLayer()
{
    const char *strOpt;
    // initialize ShaderChecker options
    getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
    g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);

    if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        strOpt = getLayerOption("ShaderCheckerLogFilename");
        if (strOpt)
        {
            g_logFile = fopen(strOpt, "w");
        }
        if (g_logFile == NULL)
            g_logFile = stdout;
    }
}

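/* Return the device dispatch table for the wrapped device, creating and
 * caching one keyed on the base object if it does not exist yet. */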
static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *devw)
{
    VkLayerDispatchTable *pTable;

    assert(devw);
    std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) devw->baseObject);
    if (it == tableMap.end())
    {
        pTable = new VkLayerDispatchTable;
        tableMap[(void *) devw->baseObject] = pTable;
    } else
    {
        return it->second;
    }

    layer_initialize_dispatch_table(pTable, (PFN_vkGetDeviceProcAddr) devw->pGPA, (VkDevice) devw->nextObject);

    return pTable;
}

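/* Same as above, but for the instance-level dispatch table. */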
static VkLayerInstanceDispatchTable * initLayerInstanceTable(const VkBaseLayerObject *instw)
{
    VkLayerInstanceDispatchTable *pTable;

    assert(instw);
    std::unordered_map<void *, VkLayerInstanceDispatchTable *>::const_iterator it = tableInstanceMap.find((void *) instw->baseObject);
    if (it == tableInstanceMap.end())
    {
        pTable = new VkLayerInstanceDispatchTable;
        tableInstanceMap[(void *) instw->baseObject] = pTable;
    } else
    {
        return it->second;
    }

    layer_init_instance_dispatch_table(pTable, (PFN_vkGetInstanceProcAddr) instw->pGPA, (VkInstance) instw->nextObject);

    return pTable;
}

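/* Loader query: report that this library provides a single layer, "ShaderChecker". */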
VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
{
    if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
        return VK_ERROR_INVALID_POINTER;

    if (*pLayerCount < 1)
        return VK_ERROR_INITIALIZATION_FAILED;
    *pLayerCount = 1;
    strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
    return VK_SUCCESS;
}

struct extProps {
    uint32_t version;
    const char * const name;
};
#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    0x10, "ShaderChecker",
    0x10, "Validation",
};

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
        VkExtensionInfoType infoType,
        uint32_t extensionIndex,
        size_t *pDataSize,
        void *pData)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */
    VkExtensionProperties *ext_props;
    uint32_t *count;

    if (pDataSize == NULL)
        return VK_ERROR_INVALID_POINTER;

    switch (infoType) {
        case VK_EXTENSION_INFO_TYPE_COUNT:
            *pDataSize = sizeof(uint32_t);
            if (pData == NULL)
                return VK_SUCCESS;
            count = (uint32_t *) pData;
            *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
            break;
        case VK_EXTENSION_INFO_TYPE_PROPERTIES:
            *pDataSize = sizeof(VkExtensionProperties);
            if (pData == NULL)
                return VK_SUCCESS;
            if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
                return VK_ERROR_INVALID_VALUE;
            ext_props = (VkExtensionProperties *) pData;
            ext_props->version = shaderCheckerExts[extensionIndex].version;
            strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
                    VK_MAX_EXTENSION_NAME);
            ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
            break;
        default:
            return VK_ERROR_INVALID_VALUE;
    }

    return VK_SUCCESS;
}

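/* Human-readable name for a SPIR-V storage class, used in diagnostic messages. */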
static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
        case spv::StorageClassInput: return "input";
        case spv::StorageClassOutput: return "output";
        case spv::StorageClassUniformConstant: return "const uniform";
        case spv::StorageClassUniform: return "uniform";
        case spv::StorageClassWorkgroupLocal: return "workgroup local";
        case spv::StorageClassWorkgroupGlobal: return "workgroup global";
        case spv::StorageClassPrivateGlobal: return "private global";
        case spv::StorageClassFunction: return "function";
        case spv::StorageClassGeneric: return "generic";
        case spv::StorageClassPrivate: return "private";
        case spv::StorageClassAtomicCounter: return "atomic counter";
        default: return "unknown";
    }
}

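/* Render a rough description of a type tree (e.g. "vec4 of float32") into dst
 * for use in mismatch messages. The caller supplies a buffer large enough for
 * the description; recursion follows the type_def_index built at shader load. */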
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}

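/* Structurally compare a type from shader `a` with a type from shader `b`.
 * When b_arrayed is set, the consumer side is allowed one extra level of
 * arrayness (as with per-vertex inputs to geometry and tessellation-control
 * stages), which is peeled off before the element types are compared. */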
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false;   /* structs cannot match if member counts differ */
                }

                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;
    }
}

static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};

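/* Single pass over the module: record Location and BuiltIn decorations, and
 * collect every OpVariable in the requested storage class into `out` (keyed by
 * location) or `builtins_out` (keyed by builtin id). */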
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}

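/* Intercept shader creation: call down the chain, then stash the SPIR-V words
 * and type index so pipeline-creation-time checks can inspect this shader later. */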
VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
                                              VkShader *pShader)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
    VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);

    shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

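/* Check that everything the consumer stage reads by location is written by the
 * producer stage, and that the types agree. Unconsumed outputs only warn;
 * missing inputs and type mismatches fail the pipeline. */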
static bool
validate_interface_between_stages(shader_source const *producer, char const *producer_name,
                                  shader_source const *consumer, char const *consumer_name,
                                  bool consumer_arrayed_input)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps are sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() > 0 && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                    producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                    consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                        producer_type, consumer_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}

enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT,  /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};

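/* Collapse a VkFormat into one of the broad categories above so it can be
 * compared against the fundamental type of a shader variable. */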
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}

/* characterizes a SPIR-V type appearing in an interface to a FF stage,
 * for comparison to a VkFormat's characterization above. */
static unsigned
get_fundamental_type(shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return FORMAT_TYPE_UNDEFINED;
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeInt:
            return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        case spv::OpTypeVector:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeMatrix:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeArray:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypePointer:
            return get_fundamental_type(src, code[3]);
        default:
            return FORMAT_TYPE_UNDEFINED;
    }
}

static bool
validate_vi_consistency(VkPipelineVertexInputCreateInfo const *vi)
{
    /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
     * each binding should be specified only once.
     */
    std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
    char str[1024];
    bool pass = true;

    for (unsigned i = 0; i < vi->bindingCount; i++) {
        auto desc = &vi->pVertexBindingDescriptions[i];
        auto & binding = bindings[desc->binding];
        if (binding) {
            sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
            pass = false;
        }
        else {
            binding = desc;
        }
    }

    return pass;
}

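/* Match vertex input attribute locations and formats against the vertex
 * shader's declared inputs, walking both maps in location order. */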
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}

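/* Match fragment shader outputs against color blend attachments: handle the
 * legacy gl_FragColor broadcast case specially, otherwise pair outputs with
 * attachments by location and check that the fundamental types agree. */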
static bool
validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];
    bool pass = true;

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachments should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        if (outputs.size()) {
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            pass = false;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                pass = false;
            }
        }

        return pass;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || (it != outputs.end() && it->first < attachment)) {
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
            pass = false;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it++;
            attachment++;
        }
    }

    return pass;
}

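/* Static per-stage properties used when matching interfaces between stages. */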
struct shader_stage_attributes {
    char const * const name;
    bool arrayed_input;
};


static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};

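/* Top-level pipeline check: gather the shader stages, VI and CB state from the
 * create info chain, then validate VI consistency, VI against VS inputs, each
 * producer/consumer stage interface, and FS outputs against the CB attachments. */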
static bool
validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
{
    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1];  /* exclude CS */
    memset(shaders, 0, sizeof(shaders));
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputCreateInfo const *vi = 0;
    char str[1024];
    bool pass = true;

    loader_platform_thread_lock_mutex(&globalLock);

    for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
        if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
            auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;

            if (shader_stage->shader.stage < VK_SHADER_STAGE_VERTEX || shader_stage->shader.stage > VK_SHADER_STAGE_FRAGMENT) {
                sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
                layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
            }
            else {
                shaders[shader_stage->shader.stage] = shader_map[(void *)(shader_stage->shader.shader)];
            }
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
            cb = (VkPipelineCbStateCreateInfo const *)stage;
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
            vi = (VkPipelineVertexInputCreateInfo const *)stage;
        }
    }

    if (vi) {
        pass = validate_vi_consistency(vi) && pass;
    }

    if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
        pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
    }

    /* TODO: enforce rules about present combinations of shaders */
    int producer = VK_SHADER_STAGE_VERTEX;
    int consumer = VK_SHADER_STAGE_GEOMETRY;

    while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
        producer++;
        consumer++;
    }

    for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
                pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
                                                         shaders[consumer], shader_stage_attribs[consumer].name,
                                                         shader_stage_attribs[consumer].arrayed_input) && pass;
            }

            producer = consumer;
        }
    }

    if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
        pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
    }

    loader_platform_thread_unlock_mutex(&globalLock);
    return pass;
}

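/* Intercepted pipeline creation entrypoints: validate first, and only pass the
 * call down to the driver if nothing fatal was found. */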
VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipeline(VkDevice device,
                         const VkGraphicsPipelineCreateInfo *pCreateInfo,
                         VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
        return pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}


VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipelineDerivative(VkDevice device,
                                   const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                   VkPipeline basePipeline,
                                   VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
        return pTable->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}

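/* Debug message callback registration: keep our own list of callbacks so this
 * layer's messages reach the application, then forward the call down the
 * instance chain. */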
VK_LAYER_EXPORT VkResult VKAPI vkDbgRegisterMsgCallback(
    VkInstance instance,
    VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback,
    void *pUserData)
{
    // This layer intercepts callbacks
    VK_LAYER_DBG_FUNCTION_NODE *pNewDbgFuncNode = (VK_LAYER_DBG_FUNCTION_NODE*)malloc(sizeof(VK_LAYER_DBG_FUNCTION_NODE));
    if (!pNewDbgFuncNode)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pNewDbgFuncNode->pfnMsgCallback = pfnMsgCallback;
    pNewDbgFuncNode->pUserData = pUserData;
    pNewDbgFuncNode->pNext = g_pDbgFunctionHead;
    g_pDbgFunctionHead = pNewDbgFuncNode;
    // force callbacks if DebugAction hasn't been set already other than initial value
    if (g_actionIsDefault) {
        g_debugAction = VK_DBG_LAYER_ACTION_CALLBACK;
    }
    // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
    VkLayerInstanceDispatchTable *pTable = tableInstanceMap[pCurObj];
    VkResult result = pTable->DbgRegisterMsgCallback(instance, pfnMsgCallback, pUserData);
    return result;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgUnregisterMsgCallback(
    VkInstance instance,
    VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback)
{
    VK_LAYER_DBG_FUNCTION_NODE *pInfo = g_pDbgFunctionHead;
    VK_LAYER_DBG_FUNCTION_NODE *pPrev = pInfo;
    while (pInfo) {
        if (pInfo->pfnMsgCallback == pfnMsgCallback) {
            pPrev->pNext = pInfo->pNext;
            if (g_pDbgFunctionHead == pInfo) {
                g_pDbgFunctionHead = pInfo->pNext;
            }
            free(pInfo);
            break;
        }
        pPrev = pInfo;
        pInfo = pInfo->pNext;
    }
    if (g_pDbgFunctionHead == NULL) {
        if (g_actionIsDefault) {
            g_debugAction = VK_DBG_LAYER_ACTION_LOG_MSG;
        } else {
            g_debugAction = (VK_LAYER_DBG_ACTION)(g_debugAction & ~((uint32_t)VK_DBG_LAYER_ACTION_CALLBACK));
        }
    }
    // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
    VkLayerInstanceDispatchTable *pTable = tableInstanceMap[pCurObj];
    VkResult result = pTable->DbgUnregisterMsgCallback(instance, pfnMsgCallback);
    return result;
}

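/* Dispatch: return our hooked entrypoints where we intercept, otherwise forward
 * the query to the next layer or driver in the chain. */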
VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice device, const char* pName)
{
    if (device == NULL)
        return NULL;

    initLayerTable((const VkBaseLayerObject *) device);

    loader_platform_thread_once(&g_initOnce, initLayer);

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkGetDeviceProcAddr);
    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkCreateGraphicsPipeline);
    ADD_HOOK(vkCreateGraphicsPipelineDerivative);
#undef ADD_HOOK

    VkBaseLayerObject* devw = (VkBaseLayerObject *) device;
    if (devw->pGPA == NULL)
        return NULL;
    return devw->pGPA((VkObject) devw->nextObject, pName);
}

VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
{
    if (inst == NULL)
        return NULL;

    initLayerInstanceTable((const VkBaseLayerObject *) inst);

    loader_platform_thread_once(&g_initOnce, initLayer);

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkGetInstanceProcAddr);
    ADD_HOOK(vkEnumerateLayers);
    ADD_HOOK(vkGetGlobalExtensionInfo);
#undef ADD_HOOK

    VkBaseLayerObject* instw = (VkBaseLayerObject *) inst;
    if (instw->pGPA == NULL)
        return NULL;
    return instw->pGPA((VkObject) instw->nextObject, pName);
}