blob: f36fee395d41f16d3aa92cdeff784f3bc6bd7ccf [file] [log] [blame]
Chris Forbesaab9d112015-04-02 13:22:31 +13001/*
2 * Vulkan
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24#include <string.h>
25#include <stdlib.h>
26#include <assert.h>
Chris Forbes67cc36f2015-04-13 12:14:52 +120027#include <map>
Chris Forbesaab9d112015-04-02 13:22:31 +130028#include <unordered_map>
Chris Forbesbb164b62015-04-08 10:19:16 +120029#include <map>
Chris Forbes4396ff52015-04-08 10:11:59 +120030#include <vector>
Chris Forbesaab9d112015-04-02 13:22:31 +130031#include "loader_platform.h"
32#include "vk_dispatch_table_helper.h"
33#include "vkLayer.h"
Chris Forbes1b466bd2015-04-15 06:59:41 +120034#include "layers_config.h"
35#include "layers_msg.h"
Chris Forbes3317b382015-05-04 14:04:24 +120036#include "vk_enum_string_helper.h"
Chris Forbes5c75afe2015-04-17 10:13:28 +120037#include "shader_checker.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130038// The following is #included again to catch certain OS-specific functions
39// being used:
40#include "loader_platform.h"
41
Chris Forbes32e3b462015-05-09 10:31:21 +120042#include "spirv/spirv.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130043
Chris Forbesaab9d112015-04-02 13:22:31 +130044
/* Maps each wrapped driver object to the dispatch table used to forward calls. */
static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
/* Most recently created device object, kept for message-callback plumbing. */
static VkBaseLayerObject *pCurObj;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;
Chris Forbes4396ff52015-04-08 10:11:59 +120051
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120052
53static void
54build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
55{
56 unsigned int const *code = (unsigned int const *)&words[0];
57 size_t size = words.size();
58
59 unsigned word = 5;
60 while (word < size) {
61 unsigned opcode = code[word] & 0x0ffffu;
62 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
63
64 switch (opcode) {
65 case spv::OpTypeVoid:
66 case spv::OpTypeBool:
67 case spv::OpTypeInt:
68 case spv::OpTypeFloat:
69 case spv::OpTypeVector:
70 case spv::OpTypeMatrix:
71 case spv::OpTypeSampler:
72 case spv::OpTypeFilter:
73 case spv::OpTypeArray:
74 case spv::OpTypeRuntimeArray:
75 case spv::OpTypeStruct:
76 case spv::OpTypeOpaque:
77 case spv::OpTypePointer:
78 case spv::OpTypeFunction:
79 case spv::OpTypeEvent:
80 case spv::OpTypeDeviceEvent:
81 case spv::OpTypeReserveId:
82 case spv::OpTypeQueue:
83 case spv::OpTypePipe:
84 type_def_index[code[word+1]] = word;
85 break;
86
87 default:
88 /* We only care about type definitions */
89 break;
90 }
91
92 word += oplen;
93 }
94}
95
Chris Forbes4396ff52015-04-08 10:11:59 +120096struct shader_source {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120097 /* the spirv image itself */
Chris Forbes4396ff52015-04-08 10:11:59 +120098 std::vector<uint32_t> words;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120099 /* a mapping of <id> to the first word of its def. this is useful because walking type
100 * trees requires jumping all over the instruction stream.
101 */
102 std::unordered_map<unsigned, unsigned> type_def_index;
Chris Forbes4453c772015-06-05 15:01:08 +1200103 bool is_spirv;
Chris Forbes4396ff52015-04-08 10:11:59 +1200104
105 shader_source(VkShaderCreateInfo const *pCreateInfo) :
Chris Forbes4453c772015-06-05 15:01:08 +1200106 words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
107 type_def_index(),
108 is_spirv(true) {
109
110 if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
111 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
112 "Shader is not SPIR-V, most checks will not be possible");
113 is_spirv = false;
114 return;
115 }
116
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200117
118 build_type_def_index(words, type_def_index);
Chris Forbes4396ff52015-04-08 10:11:59 +1200119 }
120};
121
122
/* Per-shader analysis results, keyed by the VkShader handle returned to the app. */
static std::unordered_map<void *, shader_source *> shader_map;
124
125
Chris Forbes1b466bd2015-04-15 06:59:41 +1200126static void
127initLayer()
128{
129 const char *strOpt;
130 // initialize ShaderChecker options
131 getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
132 g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);
133
134 if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
135 {
136 strOpt = getLayerOption("ShaderCheckerLogFilename");
137 if (strOpt)
138 {
139 g_logFile = fopen(strOpt, "w");
140 }
141 if (g_logFile == NULL)
142 g_logFile = stdout;
143 }
144}
145
146
Chris Forbesaab9d112015-04-02 13:22:31 +1300147static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *gpuw)
148{
149 VkLayerDispatchTable *pTable;
150
151 assert(gpuw);
152 std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) gpuw->baseObject);
153 if (it == tableMap.end())
154 {
155 pTable = new VkLayerDispatchTable;
156 tableMap[(void *) gpuw->baseObject] = pTable;
157 } else
158 {
159 return it->second;
160 }
161
Jon Ashburn79b78ac2015-05-05 14:22:52 -0600162 layer_initialize_dispatch_table(pTable, (PFN_vkGetProcAddr) gpuw->pGPA, (VkPhysicalDevice) gpuw->nextObject);
Chris Forbesaab9d112015-04-02 13:22:31 +1300163
164 return pTable;
165}
166
167
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800168VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
Chris Forbesaab9d112015-04-02 13:22:31 +1300169{
170 VkLayerDispatchTable* pTable = tableMap[gpu];
171 VkResult result = pTable->CreateDevice(gpu, pCreateInfo, pDevice);
Chris Forbes1b466bd2015-04-15 06:59:41 +1200172
173 loader_platform_thread_once(&g_initOnce, initLayer);
Chris Forbesaab9d112015-04-02 13:22:31 +1300174 // create a mapping for the device object into the dispatch table
175 tableMap.emplace(*pDevice, pTable);
Chris Forbes1ed0f982015-05-29 14:55:18 +1200176 pCurObj = (VkBaseLayerObject *) *pDevice;
Chris Forbesaab9d112015-04-02 13:22:31 +1300177 return result;
178}
179
180
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600181VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
Chris Forbesaab9d112015-04-02 13:22:31 +1300182{
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600183 if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
Chris Forbesaab9d112015-04-02 13:22:31 +1300184 return VK_ERROR_INVALID_POINTER;
185
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600186 if (*pLayerCount < 1)
Chris Forbesaab9d112015-04-02 13:22:31 +1300187 return VK_ERROR_INITIALIZATION_FAILED;
Courtney Goeltzenleuchterbb1f3602015-04-20 11:04:54 -0600188 *pLayerCount = 1;
Chris Forbesaab9d112015-04-02 13:22:31 +1300189 strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
190 return VK_SUCCESS;
191}
192
193
/* Static description of one extension exposed by this layer. */
struct extProps {
    uint32_t version;
    const char * const name;
};

#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
/* The extensions this layer advertises to the loader. */
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    {0x10, "ShaderChecker"},
    {0x10, "Validation"},
};
204
Chris Forbesaab9d112015-04-02 13:22:31 +1300205VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
206 VkExtensionInfoType infoType,
207 uint32_t extensionIndex,
208 size_t* pDataSize,
209 void* pData)
210{
Chris Forbesaab9d112015-04-02 13:22:31 +1300211 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
212 VkExtensionProperties *ext_props;
213 uint32_t *count;
214
215 if (pDataSize == NULL)
216 return VK_ERROR_INVALID_POINTER;
217
218 switch (infoType) {
219 case VK_EXTENSION_INFO_TYPE_COUNT:
220 *pDataSize = sizeof(uint32_t);
221 if (pData == NULL)
222 return VK_SUCCESS;
223 count = (uint32_t *) pData;
224 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
225 break;
226 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
227 *pDataSize = sizeof(VkExtensionProperties);
228 if (pData == NULL)
229 return VK_SUCCESS;
230 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
231 return VK_ERROR_INVALID_VALUE;
232 ext_props = (VkExtensionProperties *) pData;
233 ext_props->version = shaderCheckerExts[extensionIndex].version;
234 strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
235 VK_MAX_EXTENSION_NAME);
236 ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
237 break;
238 default:
239 return VK_ERROR_INVALID_VALUE;
240 };
241
242 return VK_SUCCESS;
243}
244
245
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200246static char const *
247storage_class_name(unsigned sc)
248{
249 switch (sc) {
Cody Northrop812b4612015-04-20 14:09:40 -0600250 case spv::StorageClassInput: return "input";
251 case spv::StorageClassOutput: return "output";
252 case spv::StorageClassUniformConstant: return "const uniform";
253 case spv::StorageClassUniform: return "uniform";
254 case spv::StorageClassWorkgroupLocal: return "workgroup local";
255 case spv::StorageClassWorkgroupGlobal: return "workgroup global";
256 case spv::StorageClassPrivateGlobal: return "private global";
257 case spv::StorageClassFunction: return "function";
258 case spv::StorageClassGeneric: return "generic";
259 case spv::StorageClassPrivate: return "private";
260 case spv::StorageClassAtomicCounter: return "atomic counter";
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200261 default: return "unknown";
262 }
263}
264
265
266/* returns ptr to null terminator */
267static char *
268describe_type(char *dst, shader_source const *src, unsigned type)
269{
270 auto type_def_it = src->type_def_index.find(type);
271
272 if (type_def_it == src->type_def_index.end()) {
273 return dst + sprintf(dst, "undef");
274 }
275
276 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
277 unsigned opcode = code[0] & 0x0ffffu;
278 switch (opcode) {
279 case spv::OpTypeBool:
280 return dst + sprintf(dst, "bool");
281 case spv::OpTypeInt:
282 return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
283 case spv::OpTypeFloat:
284 return dst + sprintf(dst, "float%d", code[2]);
285 case spv::OpTypeVector:
286 dst += sprintf(dst, "vec%d of ", code[3]);
287 return describe_type(dst, src, code[2]);
288 case spv::OpTypeMatrix:
289 dst += sprintf(dst, "mat%d of ", code[3]);
290 return describe_type(dst, src, code[2]);
291 case spv::OpTypeArray:
292 dst += sprintf(dst, "arr[%d] of ", code[3]);
293 return describe_type(dst, src, code[2]);
294 case spv::OpTypePointer:
295 dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
296 return describe_type(dst, src, code[3]);
297 case spv::OpTypeStruct:
298 {
299 unsigned oplen = code[0] >> 16;
300 dst += sprintf(dst, "struct of (");
Ian Elliottf21f14b2015-04-17 11:05:04 -0600301 for (unsigned i = 2; i < oplen; i++) {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200302 dst = describe_type(dst, src, code[i]);
303 dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
304 }
305 return dst;
306 }
307 default:
308 return dst + sprintf(dst, "oddtype");
309 }
310}
311
312
313static bool
Chris Forbes0a94a372015-06-05 14:57:05 +1200314types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200315{
316 auto a_type_def_it = a->type_def_index.find(a_type);
317 auto b_type_def_it = b->type_def_index.find(b_type);
318
319 if (a_type_def_it == a->type_def_index.end()) {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200320 return false;
321 }
322
323 if (b_type_def_it == b->type_def_index.end()) {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200324 return false;
325 }
326
327 /* walk two type trees together, and complain about differences */
328 unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
329 unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];
330
331 unsigned a_opcode = a_code[0] & 0x0ffffu;
332 unsigned b_opcode = b_code[0] & 0x0ffffu;
333
Chris Forbes0a94a372015-06-05 14:57:05 +1200334 if (b_arrayed && b_opcode == spv::OpTypeArray) {
335 /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
336 return types_match(a, b, a_type, b_code[2], false);
337 }
338
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200339 if (a_opcode != b_opcode) {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200340 return false;
341 }
342
343 switch (a_opcode) {
Chris Forbes0a94a372015-06-05 14:57:05 +1200344 /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200345 case spv::OpTypeBool:
Chris Forbes0a94a372015-06-05 14:57:05 +1200346 return true && !b_arrayed;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200347 case spv::OpTypeInt:
348 /* match on width, signedness */
Chris Forbes0a94a372015-06-05 14:57:05 +1200349 return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200350 case spv::OpTypeFloat:
351 /* match on width */
Chris Forbes0a94a372015-06-05 14:57:05 +1200352 return a_code[2] == b_code[2] && !b_arrayed;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200353 case spv::OpTypeVector:
354 case spv::OpTypeMatrix:
355 case spv::OpTypeArray:
Chris Forbes0a94a372015-06-05 14:57:05 +1200356 /* match on element type, count. these all have the same layout. we don't get here if
357 * b_arrayed -- that is handled above. */
358 return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200359 case spv::OpTypeStruct:
360 /* match on all element types */
361 {
Chris Forbes0a94a372015-06-05 14:57:05 +1200362 if (b_arrayed) {
363 /* for the purposes of matching different levels of arrayness, structs are leaves. */
364 return false;
365 }
366
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200367 unsigned a_len = a_code[0] >> 16;
368 unsigned b_len = b_code[0] >> 16;
369
370 if (a_len != b_len) {
371 return false; /* structs cannot match if member counts differ */
372 }
373
Ian Elliottf21f14b2015-04-17 11:05:04 -0600374 for (unsigned i = 2; i < a_len; i++) {
Chris Forbes0a94a372015-06-05 14:57:05 +1200375 if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200376 return false;
377 }
378 }
379
380 return true;
381 }
382 case spv::OpTypePointer:
383 /* match on pointee type. storage class is expected to differ */
Chris Forbes0a94a372015-06-05 14:57:05 +1200384 return types_match(a, b, a_code[3], b_code[3], b_arrayed);
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200385
386 default:
387 /* remaining types are CLisms, or may not appear in the interfaces we
388 * are interested in. Just claim no match.
389 */
390 return false;
391
392 }
393}
394
395
/* Looks up `id` in `map`; returns the mapped value, or `def` if absent. */
static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto found = map.find(id);
    return (found == map.end()) ? def : (int)found->second;
}
405
406
/* One variable appearing in a shader stage's interface. */
struct interface_var {
    uint32_t id;        /* SPIR-V <id> of the OpVariable */
    uint32_t type_id;   /* <id> of the variable's (pointer) type */
    /* TODO: collect the name, too? Isn't required to be present. */
};
412
413
414static void
Ian Elliottf21f14b2015-04-17 11:05:04 -0600415collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
Chris Forbes67cc36f2015-04-13 12:14:52 +1200416 std::map<uint32_t, interface_var> &out,
417 std::map<uint32_t, interface_var> &builtins_out)
418{
419 unsigned int const *code = (unsigned int const *)&src->words[0];
420 size_t size = src->words.size();
421
Chris Forbes67cc36f2015-04-13 12:14:52 +1200422 std::unordered_map<unsigned, unsigned> var_locations;
423 std::unordered_map<unsigned, unsigned> var_builtins;
424
425 unsigned word = 5;
426 while (word < size) {
427
428 unsigned opcode = code[word] & 0x0ffffu;
429 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
430
431 /* We consider two interface models: SSO rendezvous-by-location, and
432 * builtins. Complain about anything that fits neither model.
433 */
434 if (opcode == spv::OpDecorate) {
Cody Northrop812b4612015-04-20 14:09:40 -0600435 if (code[word+2] == spv::DecorationLocation) {
Chris Forbes67cc36f2015-04-13 12:14:52 +1200436 var_locations[code[word+1]] = code[word+3];
437 }
438
Cody Northrop812b4612015-04-20 14:09:40 -0600439 if (code[word+2] == spv::DecorationBuiltIn) {
Chris Forbes67cc36f2015-04-13 12:14:52 +1200440 var_builtins[code[word+1]] = code[word+3];
441 }
442 }
443
444 /* TODO: handle grouped decorations */
445 /* TODO: handle index=1 dual source outputs from FS -- two vars will
446 * have the same location, and we DONT want to clobber. */
447
Ian Elliottf21f14b2015-04-17 11:05:04 -0600448 if (opcode == spv::OpVariable && code[word+3] == sinterface) {
Chris Forbes67cc36f2015-04-13 12:14:52 +1200449 int location = value_or_default(var_locations, code[word+2], -1);
450 int builtin = value_or_default(var_builtins, code[word+2], -1);
451
452 if (location == -1 && builtin == -1) {
453 /* No location defined, and not bound to an API builtin.
454 * The spec says nothing about how this case works (or doesn't)
455 * for interface matching.
456 */
Chris Forbes5c75afe2015-04-17 10:13:28 +1200457 char str[1024];
458 sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
Ian Elliottf21f14b2015-04-17 11:05:04 -0600459 code[word+2], code[word+1], storage_class_name(sinterface));
Chris Forbes5c75afe2015-04-17 10:13:28 +1200460 layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
Chris Forbes67cc36f2015-04-13 12:14:52 +1200461 }
462 else if (location != -1) {
463 /* A user-defined interface variable, with a location. */
464 interface_var v;
465 v.id = code[word+2];
466 v.type_id = code[word+1];
467 out[location] = v;
468 }
469 else {
470 /* A builtin interface variable */
471 interface_var v;
472 v.id = code[word+2];
473 v.type_id = code[word+1];
474 builtins_out[builtin] = v;
475 }
476 }
477
478 word += oplen;
479 }
480}
481
482
Chris Forbesaab9d112015-04-02 13:22:31 +1300483VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
484 VkShader *pShader)
485{
Chris Forbes1ed0f982015-05-29 14:55:18 +1200486 loader_platform_thread_lock_mutex(&globalLock);
Chris Forbesaab9d112015-04-02 13:22:31 +1300487 VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
488 VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);
Chris Forbes4396ff52015-04-08 10:11:59 +1200489
490 shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
Chris Forbes1ed0f982015-05-29 14:55:18 +1200491 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbesaab9d112015-04-02 13:22:31 +1300492 return res;
493}
494
495
Chris Forbes5f362d02015-05-25 11:13:22 +1200496static bool
Chris Forbesbb164b62015-04-08 10:19:16 +1200497validate_interface_between_stages(shader_source const *producer, char const *producer_name,
Chris Forbes4453c772015-06-05 15:01:08 +1200498 shader_source const *consumer, char const *consumer_name,
499 bool consumer_arrayed_input)
Chris Forbesbb164b62015-04-08 10:19:16 +1200500{
501 std::map<uint32_t, interface_var> outputs;
502 std::map<uint32_t, interface_var> inputs;
503
504 std::map<uint32_t, interface_var> builtin_outputs;
505 std::map<uint32_t, interface_var> builtin_inputs;
506
Chris Forbes5c75afe2015-04-17 10:13:28 +1200507 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200508 bool pass = true;
Chris Forbesbb164b62015-04-08 10:19:16 +1200509
Cody Northrop812b4612015-04-20 14:09:40 -0600510 collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
511 collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);
Chris Forbesbb164b62015-04-08 10:19:16 +1200512
513 auto a_it = outputs.begin();
514 auto b_it = inputs.begin();
515
516 /* maps sorted by key (location); walk them together to find mismatches */
David Pinedof5997ab2015-04-27 16:36:17 -0600517 while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
518 bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
519 bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
Chris Forbes4cb97672015-06-10 08:37:27 +1200520 auto a_first = a_at_end ? 0 : a_it->first;
521 auto b_first = b_at_end ? 0 : b_it->first;
David Pinedof5997ab2015-04-27 16:36:17 -0600522
523 if (b_at_end || a_first < b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200524 sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600525 producer_name, a_first, consumer_name);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200526 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesbb164b62015-04-08 10:19:16 +1200527 a_it++;
528 }
David Pinedof5997ab2015-04-27 16:36:17 -0600529 else if (a_at_end || a_first > b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200530 sprintf(str, "%s consumes input location %d which is not written by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600531 consumer_name, b_first, producer_name);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200532 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200533 pass = false;
Chris Forbesbb164b62015-04-08 10:19:16 +1200534 b_it++;
535 }
536 else {
Chris Forbes4453c772015-06-05 15:01:08 +1200537 if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200538 /* OK! */
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200539 }
540 else {
541 char producer_type[1024];
542 char consumer_type[1024];
543 describe_type(producer_type, producer, a_it->second.type_id);
544 describe_type(consumer_type, consumer, b_it->second.type_id);
545
Chris Forbes5c75afe2015-04-17 10:13:28 +1200546 sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200547 producer_type, consumer_type);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200548 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200549 pass = false;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200550 }
Chris Forbesbb164b62015-04-08 10:19:16 +1200551 a_it++;
552 b_it++;
553 }
554 }
Chris Forbes5f362d02015-05-25 11:13:22 +1200555
556 return pass;
Chris Forbesbb164b62015-04-08 10:19:16 +1200557}
558
559
/* Coarse classification of a format (or SPIR-V type) for interface matching. */
enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED = 0,
    FORMAT_TYPE_FLOAT = 1,      /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT = 2,
    FORMAT_TYPE_UINT = 3,
};
566
567
568static unsigned
569get_format_type(VkFormat fmt) {
570 switch (fmt) {
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800571 case VK_FORMAT_UNDEFINED:
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200572 return FORMAT_TYPE_UNDEFINED;
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800573 case VK_FORMAT_R8_SINT:
574 case VK_FORMAT_R8G8_SINT:
575 case VK_FORMAT_R8G8B8_SINT:
576 case VK_FORMAT_R8G8B8A8_SINT:
577 case VK_FORMAT_R16_SINT:
578 case VK_FORMAT_R16G16_SINT:
579 case VK_FORMAT_R16G16B16_SINT:
580 case VK_FORMAT_R16G16B16A16_SINT:
581 case VK_FORMAT_R32_SINT:
582 case VK_FORMAT_R32G32_SINT:
583 case VK_FORMAT_R32G32B32_SINT:
584 case VK_FORMAT_R32G32B32A32_SINT:
585 case VK_FORMAT_B8G8R8_SINT:
586 case VK_FORMAT_B8G8R8A8_SINT:
587 case VK_FORMAT_R10G10B10A2_SINT:
588 case VK_FORMAT_B10G10R10A2_SINT:
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200589 return FORMAT_TYPE_SINT;
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800590 case VK_FORMAT_R8_UINT:
591 case VK_FORMAT_R8G8_UINT:
592 case VK_FORMAT_R8G8B8_UINT:
593 case VK_FORMAT_R8G8B8A8_UINT:
594 case VK_FORMAT_R16_UINT:
595 case VK_FORMAT_R16G16_UINT:
596 case VK_FORMAT_R16G16B16_UINT:
597 case VK_FORMAT_R16G16B16A16_UINT:
598 case VK_FORMAT_R32_UINT:
599 case VK_FORMAT_R32G32_UINT:
600 case VK_FORMAT_R32G32B32_UINT:
601 case VK_FORMAT_R32G32B32A32_UINT:
602 case VK_FORMAT_B8G8R8_UINT:
603 case VK_FORMAT_B8G8R8A8_UINT:
604 case VK_FORMAT_R10G10B10A2_UINT:
605 case VK_FORMAT_B10G10R10A2_UINT:
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200606 return FORMAT_TYPE_UINT;
607 default:
608 return FORMAT_TYPE_FLOAT;
609 }
610}
611
612
Chris Forbes28c50882015-05-04 14:04:06 +1200613/* characterizes a SPIR-V type appearing in an interface to a FF stage,
614 * for comparison to a VkFormat's characterization above. */
615static unsigned
616get_fundamental_type(shader_source const *src, unsigned type)
617{
618 auto type_def_it = src->type_def_index.find(type);
619
620 if (type_def_it == src->type_def_index.end()) {
621 return FORMAT_TYPE_UNDEFINED;
622 }
623
624 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
625 unsigned opcode = code[0] & 0x0ffffu;
626 switch (opcode) {
627 case spv::OpTypeInt:
628 return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
629 case spv::OpTypeFloat:
630 return FORMAT_TYPE_FLOAT;
631 case spv::OpTypeVector:
632 return get_fundamental_type(src, code[2]);
633 case spv::OpTypeMatrix:
634 return get_fundamental_type(src, code[2]);
635 case spv::OpTypeArray:
636 return get_fundamental_type(src, code[2]);
637 case spv::OpTypePointer:
638 return get_fundamental_type(src, code[3]);
639 default:
640 return FORMAT_TYPE_UNDEFINED;
641 }
642}
643
644
Chris Forbes5f362d02015-05-25 11:13:22 +1200645static bool
Chris Forbes0bf8fe12015-06-12 11:16:41 +1200646validate_vi_consistency(VkPipelineVertexInputCreateInfo const *vi)
647{
648 /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
649 * each binding should be specified only once.
650 */
651 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
652 char str[1024];
653 bool pass = true;
654
655 for (unsigned i = 0; i < vi->bindingCount; i++) {
656 auto desc = &vi->pVertexBindingDescriptions[i];
657 auto & binding = bindings[desc->binding];
658 if (binding) {
659 sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
660 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
661 pass = false;
662 }
663 else {
664 binding = desc;
665 }
666 }
667
668 return pass;
669}
670
671
672static bool
Chris Forbesfcd05f12015-04-08 10:36:37 +1200673validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
674{
675 std::map<uint32_t, interface_var> inputs;
676 /* we collect builtin inputs, but they will never appear in the VI state --
677 * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
678 */
679 std::map<uint32_t, interface_var> builtin_inputs;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200680 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200681 bool pass = true;
Chris Forbesfcd05f12015-04-08 10:36:37 +1200682
Cody Northrop812b4612015-04-20 14:09:40 -0600683 collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);
Chris Forbesfcd05f12015-04-08 10:36:37 +1200684
685 /* Build index by location */
686 std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
Chris Forbes6f2ab982015-05-25 11:13:24 +1200687 if (vi) {
688 for (unsigned i = 0; i < vi->attributeCount; i++)
689 attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
690 }
Chris Forbesfcd05f12015-04-08 10:36:37 +1200691
692 auto it_a = attribs.begin();
693 auto it_b = inputs.begin();
694
David Pinedof5997ab2015-04-27 16:36:17 -0600695 while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
696 bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
697 bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
Chris Forbes4cb97672015-06-10 08:37:27 +1200698 auto a_first = a_at_end ? 0 : it_a->first;
699 auto b_first = b_at_end ? 0 : it_b->first;
David Pinedof5997ab2015-04-27 16:36:17 -0600700 if (b_at_end || a_first < b_first) {
701 sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200702 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesfcd05f12015-04-08 10:36:37 +1200703 it_a++;
704 }
David Pinedof5997ab2015-04-27 16:36:17 -0600705 else if (a_at_end || b_first < a_first) {
706 sprintf(str, "VS consumes input at location %d but not provided", b_first);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200707 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200708 pass = false;
Chris Forbesfcd05f12015-04-08 10:36:37 +1200709 it_b++;
710 }
711 else {
Chris Forbes3317b382015-05-04 14:04:24 +1200712 unsigned attrib_type = get_format_type(it_a->second->format);
713 unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);
714
715 /* type checking */
716 if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
717 char vs_type[1024];
718 describe_type(vs_type, vs, it_b->second.type_id);
719 sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
720 string_VkFormat(it_a->second->format), a_first, vs_type);
721 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200722 pass = false;
Chris Forbes3317b382015-05-04 14:04:24 +1200723 }
724
Chris Forbes5c75afe2015-04-17 10:13:28 +1200725 /* OK! */
Chris Forbesfcd05f12015-04-08 10:36:37 +1200726 it_a++;
727 it_b++;
728 }
729 }
Chris Forbes5f362d02015-05-25 11:13:22 +1200730
731 return pass;
Chris Forbesfcd05f12015-04-08 10:36:37 +1200732}
733
734
Chris Forbes5f362d02015-05-25 11:13:22 +1200735static bool
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200736validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
737{
738 std::map<uint32_t, interface_var> outputs;
739 std::map<uint32_t, interface_var> builtin_outputs;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200740 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200741 bool pass = true;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200742
743 /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */
744
Cody Northrop812b4612015-04-20 14:09:40 -0600745 collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200746
747 /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
748 * and all color attachment should be UNORM/SNORM/FLOAT.
749 */
750 if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200751 if (outputs.size()) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200752 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
753 "Should not have user-defined FS outputs when using broadcast");
Chris Forbes5f362d02015-05-25 11:13:22 +1200754 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200755 }
756
Ian Elliottf21f14b2015-04-17 11:05:04 -0600757 for (unsigned i = 0; i < cb->attachmentCount; i++) {
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200758 unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
759 if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200760 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
761 "CB format should not be SINT or UINT when using broadcast");
Chris Forbes5f362d02015-05-25 11:13:22 +1200762 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200763 }
764 }
765
Chris Forbes5f362d02015-05-25 11:13:22 +1200766 return pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200767 }
768
769 auto it = outputs.begin();
770 uint32_t attachment = 0;
771
772 /* Walk attachment list and outputs together -- this is a little overpowered since attachments
773 * are currently dense, but the parallel with matching between shader stages is nice.
774 */
775
Chris Forbes8802c992015-05-05 11:34:14 +1200776 while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
scygan7a62cbe2015-06-01 19:48:11 +0200777 if (attachment == cb->attachmentCount || ( it != outputs.end() && it->first < attachment)) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200778 sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
779 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200780 it++;
781 }
782 else if (it == outputs.end() || it->first > attachment) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200783 sprintf(str, "Attachment %d not written by FS", attachment);
784 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200785 attachment++;
Chris Forbes5f362d02015-05-25 11:13:22 +1200786 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200787 }
788 else {
Chris Forbes4b009002015-05-04 14:20:10 +1200789 unsigned output_type = get_fundamental_type(fs, it->second.type_id);
790 unsigned att_type = get_format_type(cb->pAttachments[attachment].format);
791
792 /* type checking */
793 if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
794 char fs_type[1024];
795 describe_type(fs_type, fs, it->second.type_id);
796 sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
797 attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
798 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200799 pass = false;
Chris Forbes4b009002015-05-04 14:20:10 +1200800 }
801
Chris Forbes5c75afe2015-04-17 10:13:28 +1200802 /* OK! */
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200803 it++;
804 attachment++;
805 }
806 }
Chris Forbes5f362d02015-05-25 11:13:22 +1200807
808 return pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200809}
810
811
/* Per-stage metadata used when validating interfaces between shader stages. */
struct shader_stage_attributes {
    char const * const name;    /* human-readable stage name for diagnostic messages */
    bool arrayed_input;         /* true if this stage's inputs are arrayed (see shader_stage_attribs) */
};
816
817
/* Stage metadata table, indexed by the VK_SHADER_STAGE_* enum value
 * (compute excluded, hence VK_SHADER_STAGE_FRAGMENT + 1 entries).
 * Entry order must match the enum: vertex, tessellation control,
 * tessellation evaluation, geometry, fragment.
 */
static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};
826
827
Chris Forbesf1060ca2015-06-04 20:23:00 +1200828static bool
829validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
Chris Forbes60540932015-04-08 10:15:35 +1200830{
Chris Forbes8f600932015-04-08 10:16:45 +1200831 /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
832 * before trying to do anything more: */
833
Chris Forbes4453c772015-06-05 15:01:08 +1200834 shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1]; /* exclude CS */
835 memset(shaders, 0, sizeof(shaders));
Chris Forbes8f600932015-04-08 10:16:45 +1200836 VkPipelineCbStateCreateInfo const *cb = 0;
837 VkPipelineVertexInputCreateInfo const *vi = 0;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200838 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200839 bool pass = true;
Chris Forbes8f600932015-04-08 10:16:45 +1200840
Chris Forbes1ed0f982015-05-29 14:55:18 +1200841 loader_platform_thread_lock_mutex(&globalLock);
842
Chris Forbes8f600932015-04-08 10:16:45 +1200843 for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
844 if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
845 auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;
846
Chris Forbes4453c772015-06-05 15:01:08 +1200847 if (shader_stage->shader.stage < VK_SHADER_STAGE_VERTEX || shader_stage->shader.stage > VK_SHADER_STAGE_FRAGMENT) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200848 sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
849 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
850 }
Chris Forbes4453c772015-06-05 15:01:08 +1200851 else {
852 shaders[shader_stage->shader.stage] = shader_map[(void *)(shader_stage->shader.shader)];
853 }
Chris Forbes8f600932015-04-08 10:16:45 +1200854 }
855 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
856 cb = (VkPipelineCbStateCreateInfo const *)stage;
857 }
858 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
859 vi = (VkPipelineVertexInputCreateInfo const *)stage;
860 }
861 }
862
Chris Forbes0bf8fe12015-06-12 11:16:41 +1200863 if (vi) {
864 pass = validate_vi_consistency(vi) && pass;
865 }
866
Chris Forbes4453c772015-06-05 15:01:08 +1200867 if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
868 pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
Chris Forbesfcd05f12015-04-08 10:36:37 +1200869 }
870
Chris Forbes4453c772015-06-05 15:01:08 +1200871 /* TODO: enforce rules about present combinations of shaders */
872 int producer = VK_SHADER_STAGE_VERTEX;
873 int consumer = VK_SHADER_STAGE_GEOMETRY;
874
875 while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
876 producer++;
877 consumer++;
Chris Forbesbb164b62015-04-08 10:19:16 +1200878 }
879
Tony Barbour4eb3cd12015-06-11 15:04:25 -0600880 for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
Chris Forbes4453c772015-06-05 15:01:08 +1200881 assert(shaders[producer]);
882 if (shaders[consumer]) {
883 if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
884 pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
885 shaders[consumer], shader_stage_attribs[consumer].name,
886 shader_stage_attribs[consumer].arrayed_input) && pass;
887 }
888
889 producer = consumer;
890 }
891 }
892
893 if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
894 pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200895 }
896
Chris Forbes1ed0f982015-05-29 14:55:18 +1200897 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbesf1060ca2015-06-04 20:23:00 +1200898 return pass;
899}
900
901
Chris Forbesd0f7f7c2015-06-04 20:27:09 +1200902VK_LAYER_EXPORT VkResult VKAPI
903vkCreateGraphicsPipeline(VkDevice device,
904 const VkGraphicsPipelineCreateInfo *pCreateInfo,
905 VkPipeline *pPipeline)
Chris Forbesf1060ca2015-06-04 20:23:00 +1200906{
907 bool pass = validate_graphics_pipeline(pCreateInfo);
Chris Forbes5f362d02015-05-25 11:13:22 +1200908
909 if (pass) {
910 /* The driver is allowed to crash if passed junk. Only actually create the
911 * pipeline if we didn't run into any showstoppers above.
912 */
Chris Forbesf1060ca2015-06-04 20:23:00 +1200913 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
Chris Forbes5f362d02015-05-25 11:13:22 +1200914 return pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
915 }
916 else {
917 return VK_ERROR_UNKNOWN;
918 }
Chris Forbes60540932015-04-08 10:15:35 +1200919}
920
921
Chris Forbesd0f7f7c2015-06-04 20:27:09 +1200922VK_LAYER_EXPORT VkResult VKAPI
923vkCreateGraphicsPipelineDerivative(VkDevice device,
924 const VkGraphicsPipelineCreateInfo *pCreateInfo,
925 VkPipeline basePipeline,
926 VkPipeline *pPipeline)
927{
928 bool pass = validate_graphics_pipeline(pCreateInfo);
929
930 if (pass) {
931 /* The driver is allowed to crash if passed junk. Only actually create the
932 * pipeline if we didn't run into any showstoppers above.
933 */
934 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
935 return pTable->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
936 }
937 else {
938 return VK_ERROR_UNKNOWN;
939 }
940}
941
942
Chris Forbesb65ba352015-05-25 11:12:59 +1200943VK_LAYER_EXPORT VkResult VKAPI vkDbgRegisterMsgCallback(
944 VkInstance instance,
945 VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback,
946 void *pUserData)
947{
948 // This layer intercepts callbacks
949 VK_LAYER_DBG_FUNCTION_NODE *pNewDbgFuncNode = (VK_LAYER_DBG_FUNCTION_NODE*)malloc(sizeof(VK_LAYER_DBG_FUNCTION_NODE));
950 if (!pNewDbgFuncNode)
951 return VK_ERROR_OUT_OF_HOST_MEMORY;
952 pNewDbgFuncNode->pfnMsgCallback = pfnMsgCallback;
953 pNewDbgFuncNode->pUserData = pUserData;
954 pNewDbgFuncNode->pNext = g_pDbgFunctionHead;
955 g_pDbgFunctionHead = pNewDbgFuncNode;
956 // force callbacks if DebugAction hasn't been set already other than initial value
957 if (g_actionIsDefault) {
958 g_debugAction = VK_DBG_LAYER_ACTION_CALLBACK;
959 }
Chris Forbes1ed0f982015-05-29 14:55:18 +1200960 // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
961 VkLayerDispatchTable *pTable = tableMap[pCurObj];
962 VkResult result = pTable->DbgRegisterMsgCallback(instance, pfnMsgCallback, pUserData);
Chris Forbesb65ba352015-05-25 11:12:59 +1200963 return result;
964}
965
966VK_LAYER_EXPORT VkResult VKAPI vkDbgUnregisterMsgCallback(
967 VkInstance instance,
968 VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback)
969{
970 VK_LAYER_DBG_FUNCTION_NODE *pInfo = g_pDbgFunctionHead;
971 VK_LAYER_DBG_FUNCTION_NODE *pPrev = pInfo;
972 while (pInfo) {
973 if (pInfo->pfnMsgCallback == pfnMsgCallback) {
974 pPrev->pNext = pInfo->pNext;
975 if (g_pDbgFunctionHead == pInfo) {
976 g_pDbgFunctionHead = pInfo->pNext;
977 }
978 free(pInfo);
979 break;
980 }
981 pPrev = pInfo;
982 pInfo = pInfo->pNext;
983 }
984 if (g_pDbgFunctionHead == NULL) {
985 if (g_actionIsDefault) {
986 g_debugAction = VK_DBG_LAYER_ACTION_LOG_MSG;
987 } else {
988 g_debugAction = (VK_LAYER_DBG_ACTION)(g_debugAction & ~((uint32_t)VK_DBG_LAYER_ACTION_CALLBACK));
989 }
990 }
Chris Forbes1ed0f982015-05-29 14:55:18 +1200991 // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
992 VkLayerDispatchTable *pTable = tableMap[pCurObj];
993 VkResult result = pTable->DbgUnregisterMsgCallback(instance, pfnMsgCallback);
Chris Forbesb65ba352015-05-25 11:12:59 +1200994 return result;
995}
996
997
Chia-I Wu6097f3a2015-04-17 02:00:54 +0800998VK_LAYER_EXPORT void * VKAPI vkGetProcAddr(VkPhysicalDevice gpu, const char* pName)
Chris Forbesaab9d112015-04-02 13:22:31 +1300999{
1000 if (gpu == NULL)
1001 return NULL;
1002
1003 initLayerTable((const VkBaseLayerObject *) gpu);
1004
Chris Forbes1b466bd2015-04-15 06:59:41 +12001005 loader_platform_thread_once(&g_initOnce, initLayer);
1006
Chris Forbesaab9d112015-04-02 13:22:31 +13001007#define ADD_HOOK(fn) \
1008 if (!strncmp(#fn, pName, sizeof(#fn))) \
1009 return (void *) fn
1010
1011 ADD_HOOK(vkGetProcAddr);
1012 ADD_HOOK(vkEnumerateLayers);
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001013 ADD_HOOK(vkGetGlobalExtensionInfo);
Chris Forbesaab9d112015-04-02 13:22:31 +13001014 ADD_HOOK(vkCreateDevice);
1015 ADD_HOOK(vkCreateShader);
Chris Forbes60540932015-04-08 10:15:35 +12001016 ADD_HOOK(vkCreateGraphicsPipeline);
Chris Forbesd0f7f7c2015-06-04 20:27:09 +12001017 ADD_HOOK(vkCreateGraphicsPipelineDerivative);
Chris Forbesb65ba352015-05-25 11:12:59 +12001018 ADD_HOOK(vkDbgRegisterMsgCallback);
1019 ADD_HOOK(vkDbgUnregisterMsgCallback);
Chris Forbesaab9d112015-04-02 13:22:31 +13001020
1021 VkBaseLayerObject* gpuw = (VkBaseLayerObject *) gpu;
1022 if (gpuw->pGPA == NULL)
1023 return NULL;
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001024 return gpuw->pGPA((VkObject) gpuw->nextObject, pName);
1025}
1026
1027VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
1028{
1029 if (inst == NULL)
1030 return NULL;
1031
1032 //TODO initLayerTable((const VkBaseLayerObject *) inst);
1033
1034 // TODO loader_platform_thread_once(&g_initOnce, initInstanceLayer);
1035
1036#define ADD_HOOK(fn) \
1037 if (!strncmp(#fn, pName, sizeof(#fn))) \
1038 return (void *) fn
1039
1040 ADD_HOOK(vkGetProcAddr);
1041 ADD_HOOK(vkGetInstanceProcAddr);
1042 ADD_HOOK(vkEnumerateLayers);
1043 ADD_HOOK(vkGetGlobalExtensionInfo);
1044 ADD_HOOK(vkCreateDevice);
1045
1046 VkBaseLayerObject* instw = (VkBaseLayerObject *) inst;
1047 if (instw->pGPA == NULL)
1048 return NULL;
1049 return instw->pGPA((VkObject) instw->nextObject, pName);
Chris Forbesaab9d112015-04-02 13:22:31 +13001050}