blob: 8de9239974b392bb1dcbb136e9f7fd7e643123fc [file] [log] [blame]
Chris Forbes2778f302015-04-02 13:22:31 +13001/*
2 * Vulkan
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24#include <string.h>
25#include <stdlib.h>
26#include <assert.h>
Chris Forbes06e8fc32015-04-13 12:14:52 +120027#include <map>
Chris Forbes2778f302015-04-02 13:22:31 +130028#include <unordered_map>
Chris Forbes41002452015-04-08 10:19:16 +120029#include <map>
Chris Forbes3b1c4212015-04-08 10:11:59 +120030#include <vector>
Chris Forbes2778f302015-04-02 13:22:31 +130031#include "loader_platform.h"
32#include "vk_dispatch_table_helper.h"
33#include "vkLayer.h"
Chris Forbesb6b8c462015-04-15 06:59:41 +120034#include "layers_config.h"
35#include "layers_msg.h"
Chris Forbes401784b2015-05-04 14:04:24 +120036#include "vk_enum_string_helper.h"
Chris Forbes6b2ead62015-04-17 10:13:28 +120037#include "shader_checker.h"
Chris Forbes2778f302015-04-02 13:22:31 +130038// The following is #included again to catch certain OS-specific functions
39// being used:
40#include "loader_platform.h"
41
Chris Forbes7f720542015-05-09 10:31:21 +120042#include "spirv/spirv.h"
Chris Forbes2778f302015-04-02 13:22:31 +130043
Chris Forbes2778f302015-04-02 13:22:31 +130044
// Maps a device/GPU object to the layer dispatch table used for device-level calls.
static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
// Most recently created device object (cast to VkBaseLayerObject); set in vkCreateDevice.
static VkBaseLayerObject *pCurObj;
// Maps an instance object to the layer dispatch table used for instance-level calls.
static std::unordered_map<void *, VkLayerInstanceDispatchTable *> tableInstanceMap;
// One-shot guard so initLayer() runs exactly once (see vkCreateDevice).
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
// NOTE(review): globalLockInitialized is declared here but no initialization of
// globalLock is visible in this chunk -- confirm it happens elsewhere in the file.
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;
Chris Forbes3b1c4212015-04-08 10:11:59 +120052
Chris Forbes3a5e99a2015-04-10 11:41:20 +120053
54static void
55build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
56{
57 unsigned int const *code = (unsigned int const *)&words[0];
58 size_t size = words.size();
59
60 unsigned word = 5;
61 while (word < size) {
62 unsigned opcode = code[word] & 0x0ffffu;
63 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
64
65 switch (opcode) {
66 case spv::OpTypeVoid:
67 case spv::OpTypeBool:
68 case spv::OpTypeInt:
69 case spv::OpTypeFloat:
70 case spv::OpTypeVector:
71 case spv::OpTypeMatrix:
72 case spv::OpTypeSampler:
73 case spv::OpTypeFilter:
74 case spv::OpTypeArray:
75 case spv::OpTypeRuntimeArray:
76 case spv::OpTypeStruct:
77 case spv::OpTypeOpaque:
78 case spv::OpTypePointer:
79 case spv::OpTypeFunction:
80 case spv::OpTypeEvent:
81 case spv::OpTypeDeviceEvent:
82 case spv::OpTypeReserveId:
83 case spv::OpTypeQueue:
84 case spv::OpTypePipe:
85 type_def_index[code[word+1]] = word;
86 break;
87
88 default:
89 /* We only care about type definitions */
90 break;
91 }
92
93 word += oplen;
94 }
95}
96
Chris Forbes3b1c4212015-04-08 10:11:59 +120097struct shader_source {
Chris Forbes3a5e99a2015-04-10 11:41:20 +120098 /* the spirv image itself */
Chris Forbes3b1c4212015-04-08 10:11:59 +120099 std::vector<uint32_t> words;
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200100 /* a mapping of <id> to the first word of its def. this is useful because walking type
101 * trees requires jumping all over the instruction stream.
102 */
103 std::unordered_map<unsigned, unsigned> type_def_index;
Chris Forbesf044ec92015-06-05 15:01:08 +1200104 bool is_spirv;
Chris Forbes3b1c4212015-04-08 10:11:59 +1200105
106 shader_source(VkShaderCreateInfo const *pCreateInfo) :
Chris Forbesf044ec92015-06-05 15:01:08 +1200107 words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
108 type_def_index(),
109 is_spirv(true) {
110
111 if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
112 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
113 "Shader is not SPIR-V, most checks will not be possible");
114 is_spirv = false;
115 return;
116 }
117
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200118
119 build_type_def_index(words, type_def_index);
Chris Forbes3b1c4212015-04-08 10:11:59 +1200120 }
121};
122
123
// Maps a shader object (as created by vkCreateShader) to its captured SPIR-V.
static std::unordered_map<void *, shader_source *> shader_map;
125
126
Chris Forbesb6b8c462015-04-15 06:59:41 +1200127static void
128initLayer()
129{
130 const char *strOpt;
131 // initialize ShaderChecker options
132 getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportingLevel);
133 g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);
134
135 if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
136 {
137 strOpt = getLayerOption("ShaderCheckerLogFilename");
138 if (strOpt)
139 {
140 g_logFile = fopen(strOpt, "w");
141 }
142 if (g_logFile == NULL)
143 g_logFile = stdout;
144 }
145}
146
147
Chris Forbes2778f302015-04-02 13:22:31 +1300148static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *gpuw)
149{
150 VkLayerDispatchTable *pTable;
151
152 assert(gpuw);
153 std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) gpuw->baseObject);
154 if (it == tableMap.end())
155 {
156 pTable = new VkLayerDispatchTable;
157 tableMap[(void *) gpuw->baseObject] = pTable;
158 } else
159 {
160 return it->second;
161 }
162
Jon Ashburnf6b33db2015-05-05 14:22:52 -0600163 layer_initialize_dispatch_table(pTable, (PFN_vkGetProcAddr) gpuw->pGPA, (VkPhysicalDevice) gpuw->nextObject);
Chris Forbes2778f302015-04-02 13:22:31 +1300164
165 return pTable;
166}
167
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -0600168static VkLayerInstanceDispatchTable * initLayerInstanceTable(const VkBaseLayerObject *instw)
169{
170 VkLayerInstanceDispatchTable *pTable;
171
172 assert(instw);
173 std::unordered_map<void *, VkLayerInstanceDispatchTable *>::const_iterator it = tableInstanceMap.find((void *) instw->baseObject);
174 if (it == tableInstanceMap.end())
175 {
176 pTable = new VkLayerInstanceDispatchTable;
177 tableInstanceMap[(void *) instw->baseObject] = pTable;
178 } else
179 {
180 return it->second;
181 }
182
183 layer_init_instance_dispatch_table(pTable, (PFN_vkGetInstanceProcAddr) instw->pGPA, (VkInstance) instw->nextObject);
184
185 return pTable;
186}
Chris Forbes2778f302015-04-02 13:22:31 +1300187
Chia-I Wua3b9a202015-04-17 02:00:54 +0800188VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
Chris Forbes2778f302015-04-02 13:22:31 +1300189{
190 VkLayerDispatchTable* pTable = tableMap[gpu];
191 VkResult result = pTable->CreateDevice(gpu, pCreateInfo, pDevice);
Chris Forbesb6b8c462015-04-15 06:59:41 +1200192
193 loader_platform_thread_once(&g_initOnce, initLayer);
Chris Forbes2778f302015-04-02 13:22:31 +1300194 // create a mapping for the device object into the dispatch table
195 tableMap.emplace(*pDevice, pTable);
Chris Forbes7f963832015-05-29 14:55:18 +1200196 pCurObj = (VkBaseLayerObject *) *pDevice;
Chris Forbes2778f302015-04-02 13:22:31 +1300197 return result;
198}
199
200
Courtney Goeltzenleuchterd9dc0c72015-04-20 11:04:54 -0600201VK_LAYER_EXPORT VkResult VKAPI vkEnumerateLayers(VkPhysicalDevice physicalDevice, size_t maxStringSize, size_t* pLayerCount, char* const* pOutLayers, void* pReserved)
Chris Forbes2778f302015-04-02 13:22:31 +1300202{
Courtney Goeltzenleuchterd9dc0c72015-04-20 11:04:54 -0600203 if (pLayerCount == NULL || pOutLayers == NULL || pOutLayers[0] == NULL || pOutLayers[1] == NULL || pReserved == NULL)
Chris Forbes2778f302015-04-02 13:22:31 +1300204 return VK_ERROR_INVALID_POINTER;
205
Courtney Goeltzenleuchterd9dc0c72015-04-20 11:04:54 -0600206 if (*pLayerCount < 1)
Chris Forbes2778f302015-04-02 13:22:31 +1300207 return VK_ERROR_INITIALIZATION_FAILED;
Courtney Goeltzenleuchterd9dc0c72015-04-20 11:04:54 -0600208 *pLayerCount = 1;
Chris Forbes2778f302015-04-02 13:22:31 +1300209 strncpy((char *) pOutLayers[0], "ShaderChecker", maxStringSize);
210 return VK_SUCCESS;
211}
212
213
/* (version, name) pair describing one extension this layer advertises. */
struct extProps {
    uint32_t version;
    const char * const name;
};
#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
/* Extensions reported by vkGetGlobalExtensionInfo. */
static const struct extProps shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    // TODO what is the version?
    { 0x10, "ShaderChecker" },
    { 0x10, "Validation" },
};
224
Chris Forbes2778f302015-04-02 13:22:31 +1300225VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
226 VkExtensionInfoType infoType,
227 uint32_t extensionIndex,
228 size_t* pDataSize,
229 void* pData)
230{
Chris Forbes2778f302015-04-02 13:22:31 +1300231 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
232 VkExtensionProperties *ext_props;
233 uint32_t *count;
234
235 if (pDataSize == NULL)
236 return VK_ERROR_INVALID_POINTER;
237
238 switch (infoType) {
239 case VK_EXTENSION_INFO_TYPE_COUNT:
240 *pDataSize = sizeof(uint32_t);
241 if (pData == NULL)
242 return VK_SUCCESS;
243 count = (uint32_t *) pData;
244 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
245 break;
246 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
247 *pDataSize = sizeof(VkExtensionProperties);
248 if (pData == NULL)
249 return VK_SUCCESS;
250 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
251 return VK_ERROR_INVALID_VALUE;
252 ext_props = (VkExtensionProperties *) pData;
253 ext_props->version = shaderCheckerExts[extensionIndex].version;
254 strncpy(ext_props->extName, shaderCheckerExts[extensionIndex].name,
255 VK_MAX_EXTENSION_NAME);
256 ext_props->extName[VK_MAX_EXTENSION_NAME - 1] = '\0';
257 break;
258 default:
259 return VK_ERROR_INVALID_VALUE;
260 };
261
262 return VK_SUCCESS;
263}
264
265
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200266static char const *
267storage_class_name(unsigned sc)
268{
269 switch (sc) {
Cody Northrop97e52d82015-04-20 14:09:40 -0600270 case spv::StorageClassInput: return "input";
271 case spv::StorageClassOutput: return "output";
272 case spv::StorageClassUniformConstant: return "const uniform";
273 case spv::StorageClassUniform: return "uniform";
274 case spv::StorageClassWorkgroupLocal: return "workgroup local";
275 case spv::StorageClassWorkgroupGlobal: return "workgroup global";
276 case spv::StorageClassPrivateGlobal: return "private global";
277 case spv::StorageClassFunction: return "function";
278 case spv::StorageClassGeneric: return "generic";
279 case spv::StorageClassPrivate: return "private";
280 case spv::StorageClassAtomicCounter: return "atomic counter";
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200281 default: return "unknown";
282 }
283}
284
285
/* Appends a human-readable description of type <id> `type` in shader `src` to
 * the buffer at `dst`, recursing into compound types (vectors, matrices,
 * arrays, pointers, structs).
 * NOTE(review): output length is unchecked -- callers pass fixed 1024-byte
 * buffers, so a deeply nested type could overflow; confirm acceptable here.
 */
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    /* unknown <id>: not a type this module defined (or index wasn't built) */
    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    /* jump to the first word of the type's defining instruction */
    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            /* code[2] = width in bits, code[3] = signedness */
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            /* code[2] = component type, code[3] = component count */
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            /* code[2] = storage class, code[3] = pointee type */
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                /* member types occupy words 2..oplen-1 */
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            /* type opcodes we don't pretty-print */
            return dst + sprintf(dst, "oddtype");
    }
}
331
332
/* Structural equality check between type <id> a_type in shader `a` and type
 * <id> b_type in shader `b`. When `b_arrayed` is set, b_type is expected to
 * carry exactly one extra level of OpTypeArray around the type that must match
 * a_type (used when the consumer stage takes arrayed per-vertex inputs --
 * see consumer_arrayed_input in validate_interface_between_stages).
 * Returns true when the two type trees match.
 */
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    /* an <id> missing from the index cannot be matched */
    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    /* from here on both sides must use the same type constructor */
    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false; /* structs cannot match if member counts differ */
                }

                /* member types occupy words 2..len-1 of the OpTypeStruct */
                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;

    }
}
414
415
/* Looks up `id` in `map`; returns the mapped value, or `def` when absent. */
static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto found = map.find(id);
    return (found == map.end()) ? def : (int)found->second;
}
425
426
/* One shader interface variable, as collected by collect_interface_by_location. */
struct interface_var {
    uint32_t id;       // result <id> of the OpVariable
    uint32_t type_id;  // <id> of the variable's result type (a pointer type)
    /* TODO: collect the name, too? Isn't required to be present. */
};
432
433
/* Collects the interface variables (OpVariable) of storage class `sinterface`
 * from shader `src`, split into two maps:
 *   out          - Location-decorated variables, keyed by location number
 *   builtins_out - BuiltIn-decorated variables, keyed by builtin number
 * Variables with neither decoration are reported through layerCbMsg.
 * NOTE(review): relies on each variable's OpDecorate appearing earlier in the
 * instruction stream than its OpVariable -- confirm against module layout.
 */
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    /* decoration side tables gathered on the fly: <id> -> Location / BuiltIn */
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    /* instructions start after the 5-word module header */
    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        /* code[word+3] of OpVariable is its storage class */
        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                       code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_MSG_UNKNOWN, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}
501
502
Chris Forbes2778f302015-04-02 13:22:31 +1300503VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
504 VkShader *pShader)
505{
Chris Forbes7f963832015-05-29 14:55:18 +1200506 loader_platform_thread_lock_mutex(&globalLock);
Chris Forbes2778f302015-04-02 13:22:31 +1300507 VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
508 VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);
Chris Forbes3b1c4212015-04-08 10:11:59 +1200509
510 shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
Chris Forbes7f963832015-05-29 14:55:18 +1200511 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbes2778f302015-04-02 13:22:31 +1300512 return res;
513}
514
515
/* Validates the interface between two adjacent pipeline stages: every output
 * location the producer writes should be consumed, every input location the
 * consumer reads must be written, and types at matching locations must agree.
 * `consumer_arrayed_input` indicates the consumer's inputs each carry one
 * extra array level (per-vertex arrays) to be stripped during type matching.
 * Returns false when a hard mismatch (missing input, type conflict) is found;
 * unconsumed outputs only warn.
 */
static bool
validate_interface_between_stages(shader_source const *producer, char const *producer_name,
                                  shader_source const *consumer, char const *consumer_name,
                                  bool consumer_arrayed_input)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            /* producer writes a location the consumer never reads: warn only */
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                   producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            /* consumer reads a location the producer never writes: error */
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                   consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            pass = false;
            b_it++;
        }
        else {
            /* locations line up: now the types at this location must match */
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                       producer_type, consumer_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}
578
579
/* Coarse "fundamental type" buckets used to compare a VkFormat against the
 * SPIR-V type that produces or consumes it (vertex attributes, FS outputs). */
enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};
586
587
/* Buckets a VkFormat into a FORMAT_TYPE_* class. Only explicitly-SINT and
 * explicitly-UINT formats are integer from the shader's point of view; every
 * other defined format (UNORM, SNORM, FLOAT, scaled, SRGB, ...) reads/writes
 * as float. */
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}
631
632
Chris Forbes156a1162015-05-04 14:04:06 +1200633/* characterizes a SPIR-V type appearing in an interface to a FF stage,
634 * for comparison to a VkFormat's characterization above. */
635static unsigned
636get_fundamental_type(shader_source const *src, unsigned type)
637{
638 auto type_def_it = src->type_def_index.find(type);
639
640 if (type_def_it == src->type_def_index.end()) {
641 return FORMAT_TYPE_UNDEFINED;
642 }
643
644 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
645 unsigned opcode = code[0] & 0x0ffffu;
646 switch (opcode) {
647 case spv::OpTypeInt:
648 return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
649 case spv::OpTypeFloat:
650 return FORMAT_TYPE_FLOAT;
651 case spv::OpTypeVector:
652 return get_fundamental_type(src, code[2]);
653 case spv::OpTypeMatrix:
654 return get_fundamental_type(src, code[2]);
655 case spv::OpTypeArray:
656 return get_fundamental_type(src, code[2]);
657 case spv::OpTypePointer:
658 return get_fundamental_type(src, code[3]);
659 default:
660 return FORMAT_TYPE_UNDEFINED;
661 }
662}
663
664
Chris Forbesee99b9b2015-05-25 11:13:22 +1200665static bool
Chris Forbes280ba2c2015-06-12 11:16:41 +1200666validate_vi_consistency(VkPipelineVertexInputCreateInfo const *vi)
667{
668 /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
669 * each binding should be specified only once.
670 */
671 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
672 char str[1024];
673 bool pass = true;
674
675 for (unsigned i = 0; i < vi->bindingCount; i++) {
676 auto desc = &vi->pVertexBindingDescriptions[i];
677 auto & binding = bindings[desc->binding];
678 if (binding) {
679 sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
680 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
681 pass = false;
682 }
683 else {
684 binding = desc;
685 }
686 }
687
688 return pass;
689}
690
691
/* Validates the pipeline's vertex-input state against the vertex shader's
 * declared inputs: every VS input location must be fed by an attribute, and
 * attribute/input fundamental types must agree. Unconsumed attributes only
 * warn. `vi` may be NULL (no vertex-input state). Returns false on a hard
 * mismatch. */
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    /* both maps sorted by location; walk them together to find mismatches */
    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            /* attribute provided but VS never reads it: warn only */
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            /* VS reads a location no attribute feeds: error */
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}
753
754
Chris Forbesee99b9b2015-05-25 11:13:22 +1200755static bool
Chris Forbes3616b462015-04-08 10:37:20 +1200756validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
757{
758 std::map<uint32_t, interface_var> outputs;
759 std::map<uint32_t, interface_var> builtin_outputs;
Chris Forbes6b2ead62015-04-17 10:13:28 +1200760 char str[1024];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200761 bool pass = true;
Chris Forbes3616b462015-04-08 10:37:20 +1200762
763 /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */
764
Cody Northrop97e52d82015-04-20 14:09:40 -0600765 collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);
Chris Forbes3616b462015-04-08 10:37:20 +1200766
767 /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
768 * and all color attachment should be UNORM/SNORM/FLOAT.
769 */
770 if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
Chris Forbes3616b462015-04-08 10:37:20 +1200771 if (outputs.size()) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200772 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
773 "Should not have user-defined FS outputs when using broadcast");
Chris Forbesee99b9b2015-05-25 11:13:22 +1200774 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200775 }
776
Ian Elliott1cb62222015-04-17 11:05:04 -0600777 for (unsigned i = 0; i < cb->attachmentCount; i++) {
Chris Forbes3616b462015-04-08 10:37:20 +1200778 unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
779 if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200780 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
781 "CB format should not be SINT or UINT when using broadcast");
Chris Forbesee99b9b2015-05-25 11:13:22 +1200782 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200783 }
784 }
785
Chris Forbesee99b9b2015-05-25 11:13:22 +1200786 return pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200787 }
788
789 auto it = outputs.begin();
790 uint32_t attachment = 0;
791
792 /* Walk attachment list and outputs together -- this is a little overpowered since attachments
793 * are currently dense, but the parallel with matching between shader stages is nice.
794 */
795
Chris Forbesbf2b1d22015-05-05 11:34:14 +1200796 while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
scygan3a22ce92015-06-01 19:48:11 +0200797 if (attachment == cb->attachmentCount || ( it != outputs.end() && it->first < attachment)) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200798 sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
799 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes3616b462015-04-08 10:37:20 +1200800 it++;
801 }
802 else if (it == outputs.end() || it->first > attachment) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200803 sprintf(str, "Attachment %d not written by FS", attachment);
804 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
Chris Forbes3616b462015-04-08 10:37:20 +1200805 attachment++;
Chris Forbesee99b9b2015-05-25 11:13:22 +1200806 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200807 }
808 else {
Chris Forbes46d31e52015-05-04 14:20:10 +1200809 unsigned output_type = get_fundamental_type(fs, it->second.type_id);
810 unsigned att_type = get_format_type(cb->pAttachments[attachment].format);
811
812 /* type checking */
813 if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
814 char fs_type[1024];
815 describe_type(fs_type, fs, it->second.type_id);
816 sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
817 attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
818 layerCbMsg(VK_DBG_MSG_ERROR, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200819 pass = false;
Chris Forbes46d31e52015-05-04 14:20:10 +1200820 }
821
Chris Forbes6b2ead62015-04-17 10:13:28 +1200822 /* OK! */
Chris Forbes3616b462015-04-08 10:37:20 +1200823 it++;
824 attachment++;
825 }
826 }
Chris Forbesee99b9b2015-05-25 11:13:22 +1200827
828 return pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200829}
830
831
Chris Forbesf044ec92015-06-05 15:01:08 +1200832struct shader_stage_attributes {
833 char const * const name;
834 bool arrayed_input;
835};
836
837
/* Stage metadata table, indexed by the VK_SHADER_STAGE_* enum value.
 * Covers the graphics stages only (VERTEX..FRAGMENT); compute is excluded. */
static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};
846
847
Chris Forbes81874ba2015-06-04 20:23:00 +1200848static bool
849validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
Chris Forbes4175e6f2015-04-08 10:15:35 +1200850{
Chris Forbesf6800b52015-04-08 10:16:45 +1200851 /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
852 * before trying to do anything more: */
853
Chris Forbesf044ec92015-06-05 15:01:08 +1200854 shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1]; /* exclude CS */
855 memset(shaders, 0, sizeof(shaders));
Chris Forbesf6800b52015-04-08 10:16:45 +1200856 VkPipelineCbStateCreateInfo const *cb = 0;
857 VkPipelineVertexInputCreateInfo const *vi = 0;
Chris Forbes6b2ead62015-04-17 10:13:28 +1200858 char str[1024];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200859 bool pass = true;
Chris Forbesf6800b52015-04-08 10:16:45 +1200860
Chris Forbes7f963832015-05-29 14:55:18 +1200861 loader_platform_thread_lock_mutex(&globalLock);
862
Chris Forbesf6800b52015-04-08 10:16:45 +1200863 for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
864 if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
865 auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;
866
Chris Forbesf044ec92015-06-05 15:01:08 +1200867 if (shader_stage->shader.stage < VK_SHADER_STAGE_VERTEX || shader_stage->shader.stage > VK_SHADER_STAGE_FRAGMENT) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200868 sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
869 layerCbMsg(VK_DBG_MSG_WARNING, VK_VALIDATION_LEVEL_0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
870 }
Chris Forbesf044ec92015-06-05 15:01:08 +1200871 else {
872 shaders[shader_stage->shader.stage] = shader_map[(void *)(shader_stage->shader.shader)];
873 }
Chris Forbesf6800b52015-04-08 10:16:45 +1200874 }
875 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
876 cb = (VkPipelineCbStateCreateInfo const *)stage;
877 }
878 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
879 vi = (VkPipelineVertexInputCreateInfo const *)stage;
880 }
881 }
882
Chris Forbes280ba2c2015-06-12 11:16:41 +1200883 if (vi) {
884 pass = validate_vi_consistency(vi) && pass;
885 }
886
Chris Forbesf044ec92015-06-05 15:01:08 +1200887 if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
888 pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
Chris Forbes772d03b2015-04-08 10:36:37 +1200889 }
890
Chris Forbesf044ec92015-06-05 15:01:08 +1200891 /* TODO: enforce rules about present combinations of shaders */
892 int producer = VK_SHADER_STAGE_VERTEX;
893 int consumer = VK_SHADER_STAGE_GEOMETRY;
894
895 while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
896 producer++;
897 consumer++;
Chris Forbes41002452015-04-08 10:19:16 +1200898 }
899
Tony Barbour0102a902015-06-11 15:04:25 -0600900 for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
Chris Forbesf044ec92015-06-05 15:01:08 +1200901 assert(shaders[producer]);
902 if (shaders[consumer]) {
903 if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
904 pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
905 shaders[consumer], shader_stage_attribs[consumer].name,
906 shader_stage_attribs[consumer].arrayed_input) && pass;
907 }
908
909 producer = consumer;
910 }
911 }
912
913 if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
914 pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200915 }
916
Chris Forbes7f963832015-05-29 14:55:18 +1200917 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbes81874ba2015-06-04 20:23:00 +1200918 return pass;
919}
920
921
Chris Forbes39d8d752015-06-04 20:27:09 +1200922VK_LAYER_EXPORT VkResult VKAPI
923vkCreateGraphicsPipeline(VkDevice device,
924 const VkGraphicsPipelineCreateInfo *pCreateInfo,
925 VkPipeline *pPipeline)
Chris Forbes81874ba2015-06-04 20:23:00 +1200926{
927 bool pass = validate_graphics_pipeline(pCreateInfo);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200928
929 if (pass) {
930 /* The driver is allowed to crash if passed junk. Only actually create the
931 * pipeline if we didn't run into any showstoppers above.
932 */
Chris Forbes81874ba2015-06-04 20:23:00 +1200933 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200934 return pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
935 }
936 else {
937 return VK_ERROR_UNKNOWN;
938 }
Chris Forbes4175e6f2015-04-08 10:15:35 +1200939}
940
941
Chris Forbes39d8d752015-06-04 20:27:09 +1200942VK_LAYER_EXPORT VkResult VKAPI
943vkCreateGraphicsPipelineDerivative(VkDevice device,
944 const VkGraphicsPipelineCreateInfo *pCreateInfo,
945 VkPipeline basePipeline,
946 VkPipeline *pPipeline)
947{
948 bool pass = validate_graphics_pipeline(pCreateInfo);
949
950 if (pass) {
951 /* The driver is allowed to crash if passed junk. Only actually create the
952 * pipeline if we didn't run into any showstoppers above.
953 */
954 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
955 return pTable->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
956 }
957 else {
958 return VK_ERROR_UNKNOWN;
959 }
960}
961
962
Chris Forbesdb467bd2015-05-25 11:12:59 +1200963VK_LAYER_EXPORT VkResult VKAPI vkDbgRegisterMsgCallback(
964 VkInstance instance,
965 VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback,
966 void *pUserData)
967{
968 // This layer intercepts callbacks
969 VK_LAYER_DBG_FUNCTION_NODE *pNewDbgFuncNode = (VK_LAYER_DBG_FUNCTION_NODE*)malloc(sizeof(VK_LAYER_DBG_FUNCTION_NODE));
970 if (!pNewDbgFuncNode)
971 return VK_ERROR_OUT_OF_HOST_MEMORY;
972 pNewDbgFuncNode->pfnMsgCallback = pfnMsgCallback;
973 pNewDbgFuncNode->pUserData = pUserData;
974 pNewDbgFuncNode->pNext = g_pDbgFunctionHead;
975 g_pDbgFunctionHead = pNewDbgFuncNode;
976 // force callbacks if DebugAction hasn't been set already other than initial value
977 if (g_actionIsDefault) {
978 g_debugAction = VK_DBG_LAYER_ACTION_CALLBACK;
979 }
Chris Forbes7f963832015-05-29 14:55:18 +1200980 // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
981 VkLayerDispatchTable *pTable = tableMap[pCurObj];
982 VkResult result = pTable->DbgRegisterMsgCallback(instance, pfnMsgCallback, pUserData);
Chris Forbesdb467bd2015-05-25 11:12:59 +1200983 return result;
984}
985
986VK_LAYER_EXPORT VkResult VKAPI vkDbgUnregisterMsgCallback(
987 VkInstance instance,
988 VK_DBG_MSG_CALLBACK_FUNCTION pfnMsgCallback)
989{
990 VK_LAYER_DBG_FUNCTION_NODE *pInfo = g_pDbgFunctionHead;
991 VK_LAYER_DBG_FUNCTION_NODE *pPrev = pInfo;
992 while (pInfo) {
993 if (pInfo->pfnMsgCallback == pfnMsgCallback) {
994 pPrev->pNext = pInfo->pNext;
995 if (g_pDbgFunctionHead == pInfo) {
996 g_pDbgFunctionHead = pInfo->pNext;
997 }
998 free(pInfo);
999 break;
1000 }
1001 pPrev = pInfo;
1002 pInfo = pInfo->pNext;
1003 }
1004 if (g_pDbgFunctionHead == NULL) {
1005 if (g_actionIsDefault) {
1006 g_debugAction = VK_DBG_LAYER_ACTION_LOG_MSG;
1007 } else {
1008 g_debugAction = (VK_LAYER_DBG_ACTION)(g_debugAction & ~((uint32_t)VK_DBG_LAYER_ACTION_CALLBACK));
1009 }
1010 }
Chris Forbes7f963832015-05-29 14:55:18 +12001011 // NOT CORRECT WITH MULTIPLE DEVICES OR INSTANCES, BUT THIS IS ALL GOING AWAY SOON ANYWAY
1012 VkLayerDispatchTable *pTable = tableMap[pCurObj];
1013 VkResult result = pTable->DbgUnregisterMsgCallback(instance, pfnMsgCallback);
Chris Forbesdb467bd2015-05-25 11:12:59 +12001014 return result;
1015}
1016
1017
Chia-I Wua3b9a202015-04-17 02:00:54 +08001018VK_LAYER_EXPORT void * VKAPI vkGetProcAddr(VkPhysicalDevice gpu, const char* pName)
Chris Forbes2778f302015-04-02 13:22:31 +13001019{
1020 if (gpu == NULL)
1021 return NULL;
1022
1023 initLayerTable((const VkBaseLayerObject *) gpu);
1024
Chris Forbesb6b8c462015-04-15 06:59:41 +12001025 loader_platform_thread_once(&g_initOnce, initLayer);
1026
Chris Forbes2778f302015-04-02 13:22:31 +13001027#define ADD_HOOK(fn) \
1028 if (!strncmp(#fn, pName, sizeof(#fn))) \
1029 return (void *) fn
1030
1031 ADD_HOOK(vkGetProcAddr);
1032 ADD_HOOK(vkEnumerateLayers);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001033 ADD_HOOK(vkGetGlobalExtensionInfo);
Chris Forbes2778f302015-04-02 13:22:31 +13001034 ADD_HOOK(vkCreateDevice);
1035 ADD_HOOK(vkCreateShader);
Chris Forbes4175e6f2015-04-08 10:15:35 +12001036 ADD_HOOK(vkCreateGraphicsPipeline);
Chris Forbes39d8d752015-06-04 20:27:09 +12001037 ADD_HOOK(vkCreateGraphicsPipelineDerivative);
Chris Forbesdb467bd2015-05-25 11:12:59 +12001038 ADD_HOOK(vkDbgRegisterMsgCallback);
1039 ADD_HOOK(vkDbgUnregisterMsgCallback);
Chris Forbes2778f302015-04-02 13:22:31 +13001040
1041 VkBaseLayerObject* gpuw = (VkBaseLayerObject *) gpu;
1042 if (gpuw->pGPA == NULL)
1043 return NULL;
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001044 return gpuw->pGPA((VkObject) gpuw->nextObject, pName);
1045}
1046
1047VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
1048{
1049 if (inst == NULL)
1050 return NULL;
1051
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -06001052 initLayerInstanceTable((const VkBaseLayerObject *) inst);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001053
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -06001054 loader_platform_thread_once(&g_initOnce, initLayer);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001055
1056#define ADD_HOOK(fn) \
1057 if (!strncmp(#fn, pName, sizeof(#fn))) \
1058 return (void *) fn
1059
1060 ADD_HOOK(vkGetProcAddr);
1061 ADD_HOOK(vkGetInstanceProcAddr);
1062 ADD_HOOK(vkEnumerateLayers);
1063 ADD_HOOK(vkGetGlobalExtensionInfo);
1064 ADD_HOOK(vkCreateDevice);
1065
1066 VkBaseLayerObject* instw = (VkBaseLayerObject *) inst;
1067 if (instw->pGPA == NULL)
1068 return NULL;
1069 return instw->pGPA((VkObject) instw->nextObject, pName);
Chris Forbes2778f302015-04-02 13:22:31 +13001070}