/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include "loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vkLayer.h"
#include "layers_config.h"
#include "layers_msg.h"
#include "vk_enum_string_helper.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "loader_platform.h"

#include "spirv/spirv.h"


static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
static VkBaseLayerObject *pCurObj;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;

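/* Build a map from each result <id> defined by an OpType* instruction to the
 * offset of that instruction in the module, so type trees can be walked later
 * without rescanning the whole instruction stream. */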
static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
        case spv::OpTypeVoid:
        case spv::OpTypeBool:
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeSampler:
        case spv::OpTypeFilter:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeStruct:
        case spv::OpTypeOpaque:
        case spv::OpTypePointer:
        case spv::OpTypeFunction:
        case spv::OpTypeEvent:
        case spv::OpTypeDeviceEvent:
        case spv::OpTypeReserveId:
        case spv::OpTypeQueue:
        case spv::OpTypePipe:
            type_def_index[code[word+1]] = word;
            break;

        default:
            /* We only care about type definitions */
            break;
        }

        word += oplen;
    }
}

struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;

    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)) {

        build_type_def_index(words, type_def_index);
    }
};


static std::unordered_map<void *, shader_source *> shader_map;

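/* Read the layer's reporting level, debug action and optional log file from the
 * layer configuration; run once before any validation work is done. */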
static void
initLayer()
{
    const char *strOpt;
    // initialize ShaderChecker options
    getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
    g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);

    if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        strOpt = getLayerOption("ShaderCheckerLogFilename");
        if (strOpt)
        {
            g_logFile = fopen(strOpt, "w");
        }
        if (g_logFile == NULL)
            g_logFile = stdout;
    }
}

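/* Look up (or create and initialize) the dispatch table for the next layer or
 * driver below us, keyed by the wrapped base object. */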
static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *gpuw)
{
    VkLayerDispatchTable *pTable;

    assert(gpuw);
    std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) gpuw->baseObject);
    if (it == tableMap.end())
    {
        pTable = new VkLayerDispatchTable;
        tableMap[(void *) gpuw->baseObject] = pTable;
    } else
    {
        return it->second;
    }

    layer_initialize_dispatch_table(pTable, gpuw->pGPA, (VkPhysicalDevice) gpuw->nextObject);
    pCurObj = (VkBaseLayerObject *)gpuw->baseObject;

    return pTable;
}

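/* Pass device creation down the chain, then remember which dispatch table to use
 * for calls made on the new device. */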
VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
{
    VkLayerDispatchTable* pTable = tableMap[gpu];
    VkResult result = pTable->CreateDevice(gpu, pCreateInfo, pDevice);

    loader_platform_thread_once(&g_initOnce, initLayer);
    // create a mapping for the device object into the dispatch table
    tableMap.emplace(*pDevice, pTable);
    pCurObj = (VkBaseLayerObject *) *pDevice;
    return result;
}

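/* Report this layer ("ShaderChecker") to the loader. */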
VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
{
    if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
        return VK_ERROR_INVALID_POINTER;

    if (*pLayerCount < 1)
        return VK_ERROR_INITIALIZATION_FAILED;
    *pLayerCount = 1;
    strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
    return VK_SUCCESS;
}


struct extProps {
    uint32_t version;
    const char * const name;
};
#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    0x10, "ShaderChecker",
    0x10, "Validation",
};

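/* Answer the loader's queries about the extensions this layer exposes. */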
VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
        VkExtensionInfoType infoType,
        uint32_t extensionIndex,
        size_t* pDataSize,
        void* pData)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */
    VkExtensionProperties *ext_props;
    uint32_t *count;

    if (pDataSize == NULL)
        return VK_ERROR_INVALID_POINTER;

    switch (infoType) {
        case VK_EXTENSION_INFO_TYPE_COUNT:
            *pDataSize = sizeof(uint32_t);
            if (pData == NULL)
                return VK_SUCCESS;
            count = (uint32_t *) pData;
            *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
            break;
        case VK_EXTENSION_INFO_TYPE_PROPERTIES:
            *pDataSize = sizeof(VkExtensionProperties);
            if (pData == NULL)
                return VK_SUCCESS;
            if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
                return VK_ERROR_INVALID_VALUE;
            ext_props = (VkExtensionProperties *) pData;
            ext_props->version = shaderCheckerExts[extensionIndex].version;
            strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
                    VK_MAX_EXTENSION_NAME);
            ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
            break;
        default:
            return VK_ERROR_INVALID_VALUE;
    }

    return VK_SUCCESS;
}

static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
    case spv::StorageClassInput: return "input";
    case spv::StorageClassOutput: return "output";
    case spv::StorageClassUniformConstant: return "const uniform";
    case spv::StorageClassUniform: return "uniform";
    case spv::StorageClassWorkgroupLocal: return "workgroup local";
    case spv::StorageClassWorkgroupGlobal: return "workgroup global";
    case spv::StorageClassPrivateGlobal: return "private global";
    case spv::StorageClassFunction: return "function";
    case spv::StorageClassGeneric: return "generic";
    case spv::StorageClassPrivate: return "private";
    case spv::StorageClassAtomicCounter: return "atomic counter";
    default: return "unknown";
    }
}


/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}

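/* Walk the two type trees in parallel and report whether they describe the same
 * type; for pointers only the pointee is compared, since the storage class is
 * expected to differ across the interface. */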
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        case spv::OpTypeBool:
            return true;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3];
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2];
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout */
            return types_match(a, b, a_code[2], b_code[2]) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false;   /* structs cannot match if member counts differ */
                }

                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i])) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3]);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;
    }
}


static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};

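/* Collect all variables in the given storage class, split into those keyed by
 * Location decoration (out) and those keyed by BuiltIn decoration (builtins_out). */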
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    if (code[0] != spv::MagicNumber) {
        layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                   "Shader is not SPIR-V, unable to extract interface");
        return;
    }

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}

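/* Intercept shader creation so we can keep our own copy of the SPIR-V words for
 * later cross-stage validation. */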
VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
                                              VkShader *pShader)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
    VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);

    shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

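/* Check that everything the consumer stage reads by location is written by the
 * producer stage, and that the types on each matched location agree. */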
static bool
validate_interface_between_stages(shader_source const *producer, char const *producer_name,
                                  shader_source const *consumer, char const *consumer_name)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = (outputs.size() > 0 ? a_it->first : 0);
        auto b_first = (inputs.size() > 0 ? b_it->first : 0);

        if (b_at_end || a_first < b_first) {
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                    producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                    consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            pass = false;
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                        producer_type, consumer_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}


enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT,  /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};

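/* Collapse a VkFormat into the broad categories above, for comparison against
 * the fundamental type of a shader variable. */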
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}


/* characterizes a SPIR-V type appearing in an interface to a FF stage,
 * for comparison to a VkFormat's characterization above. */
static unsigned
get_fundamental_type(shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return FORMAT_TYPE_UNDEFINED;
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeInt:
            return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        case spv::OpTypeVector:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeMatrix:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeArray:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypePointer:
            return get_fundamental_type(src, code[3]);
        default:
            return FORMAT_TYPE_UNDEFINED;
    }
}

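/* Check the pipeline's vertex input state against the vertex shader's inputs:
 * every shader input must be fed by an attribute, and the fundamental types
 * must agree. Unconsumed attributes only produce a warning. */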
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = (attribs.size() > 0 ? it_a->first : 0);
        auto b_first = (inputs.size() > 0 ? it_b->first : 0);
        if (b_at_end || a_first < b_first) {
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}

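/* Check fragment shader outputs against the color blend attachments, including
 * the legacy gl_FragColor broadcast case. */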
static bool
validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];
    bool pass = true;

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachments should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        if (outputs.size()) {
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            pass = false;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                pass = false;
            }
        }

        return pass;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || (it != outputs.end() && it->first < attachment)) {
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
            pass = false;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it++;
            attachment++;
        }
    }

    return pass;
}

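/* Top-level pipeline check: find the VS, FS, vertex input and CB state in the
 * create-info chain, then run the interface checks between each adjacent pair. */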
static bool
validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
{
    /* TODO: run cross-stage validation for GS, TCS, TES stages */

    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_source const *vs_source = 0;
    shader_source const *fs_source = 0;
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputCreateInfo const *vi = 0;
    char str[1024];
    bool pass = true;

    loader_platform_thread_lock_mutex(&globalLock);

    for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
        if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
            auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;

            if (shader_stage->shader.stage == VK_SHADER_STAGE_VERTEX) {
                vs_source = shader_map[(void *)(shader_stage->shader.shader)];
            }
            else if (shader_stage->shader.stage == VK_SHADER_STAGE_FRAGMENT) {
                fs_source = shader_map[(void *)(shader_stage->shader.shader)];
            }
            else {
                sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
                layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
            }
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
            cb = (VkPipelineCbStateCreateInfo const *)stage;
        }
        else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
            vi = (VkPipelineVertexInputCreateInfo const *)stage;
        }
    }

    sprintf(str, "Pipeline: vi=%p vs=%p fs=%p cb=%p\n", vi, vs_source, fs_source, cb);
    layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NONE, "SC", str);

    if (vs_source) {
        pass = validate_vi_against_vs_inputs(vi, vs_source) && pass;
    }

    if (vs_source && fs_source) {
        pass = validate_interface_between_stages(vs_source, "vertex shader",
                                                 fs_source, "fragment shader") && pass;
    }

    if (fs_source && cb) {
        pass = validate_fs_outputs_against_cb(fs_source, cb) && pass;
    }

    loader_platform_thread_unlock_mutex(&globalLock);
    return pass;
}

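/* Validate the pipeline description before handing it to the driver; refuse to
 * create the pipeline if validation found a showstopper. */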
VK_LAYER_EXPORT VkResult VKAPI vkCreateGraphicsPipeline(VkDevice device,
                                                        const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                                        VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
        return pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}

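/* Register/unregister debug message callbacks: maintain the layer's own callback
 * list, then forward the call down the chain. */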
VK_LAYER_EXPORT VkResult VKAPI vkDbgRegisterMsgCallback(
    VkInstance instance,
    VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback,
    void *pUserData)
{
    // This layer intercepts callbacks
    VK_LAYER_DBG_FUNCTION_NODE *pNewDbgFuncNode = (VK_LAYER_DBG_FUNCTION_NODE*)malloc(sizeof(VK_LAYER_DBG_FUNCTION_NODE));
    if (!pNewDbgFuncNode)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pNewDbgFuncNode->pfnMsgCallback = pfnMsgCallback;
    pNewDbgFuncNode->pUserData = pUserData;
    pNewDbgFuncNode->pNext = g_pDbgFunctionHead;
    g_pDbgFunctionHead = pNewDbgFuncNode;
    // force callbacks if DebugAction hasn't been set to anything other than its initial value
    if (g_actionIsDefault) {
        g_debugAction = VK_DBG_LAYER_ACTION_CALLBACK;
    }
    // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
    VkLayerDispatchTable *pTable = tableMap[pCurObj];
    VkResult result = pTable->DbgRegisterMsgCallback(instance, pfnMsgCallback, pUserData);
    return result;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgUnregisterMsgCallback(
    VkInstance instance,
    VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback)
{
    VK_LAYER_DBG_FUNCTION_NODE *pInfo = g_pDbgFunctionHead;
    VK_LAYER_DBG_FUNCTION_NODE *pPrev = pInfo;
    while (pInfo) {
        if (pInfo->pfnMsgCallback == pfnMsgCallback) {
            pPrev->pNext = pInfo->pNext;
            if (g_pDbgFunctionHead == pInfo) {
                g_pDbgFunctionHead = pInfo->pNext;
            }
            free(pInfo);
            break;
        }
        pPrev = pInfo;
        pInfo = pInfo->pNext;
    }
    if (g_pDbgFunctionHead == NULL) {
        if (g_actionIsDefault) {
            g_debugAction = VK_DBG_LAYER_ACTION_LOG_MSG;
        } else {
            g_debugAction = (VK_LAYER_DBG_ACTION)(g_debugAction & ~((uint32_t)VK_DBG_LAYER_ACTION_CALLBACK));
        }
    }
    // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
    VkLayerDispatchTable *pTable = tableMap[pCurObj];
    VkResult result = pTable->DbgUnregisterMsgCallback(instance, pfnMsgCallback);
    return result;
}

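/* Hand out this layer's intercepted entrypoints; everything else is forwarded to
 * the next layer/driver via the wrapped object's GPA. */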
VK_LAYER_EXPORT void * VKAPI vkGetProcAddr(VkPhysicalDevice gpu, const char* pName)
{
    if (gpu == NULL)
        return NULL;

    initLayerTable((const VkBaseLayerObject *) gpu);

    loader_platform_thread_once(&g_initOnce, initLayer);

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkGetProcAddr);
    ADD_HOOK(vkEnumerateLayers);
    ADD_HOOK(vkCreateDevice);
    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkCreateGraphicsPipeline);
    ADD_HOOK(vkDbgRegisterMsgCallback);
    ADD_HOOK(vkDbgUnregisterMsgCallback);

    VkBaseLayerObject* gpuw = (VkBaseLayerObject *) gpu;
    if (gpuw->pGPA == NULL)
        return NULL;
    return gpuw->pGPA((VkPhysicalDevice) gpuw->nextObject, pName);
}