blob: d31d7f65a8181d3cbab2f3d21e3eccd02af30328 [file] [log] [blame]
Chris Forbes2778f302015-04-02 13:22:31 +13001/*
2 * Vulkan
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24#include <string.h>
25#include <stdlib.h>
26#include <assert.h>
Chris Forbes06e8fc32015-04-13 12:14:52 +120027#include <map>
Chris Forbes2778f302015-04-02 13:22:31 +130028#include <unordered_map>
Chris Forbes41002452015-04-08 10:19:16 +120029#include <map>
Chris Forbes3b1c4212015-04-08 10:11:59 +120030#include <vector>
Chris Forbes2778f302015-04-02 13:22:31 +130031#include "loader_platform.h"
32#include "vk_dispatch_table_helper.h"
33#include "vkLayer.h"
Chris Forbesb6b8c462015-04-15 06:59:41 +120034#include "layers_config.h"
35#include "layers_msg.h"
Chris Forbes401784b2015-05-04 14:04:24 +120036#include "vk_enum_string_helper.h"
Chris Forbes6b2ead62015-04-17 10:13:28 +120037#include "shader_checker.h"
Chris Forbes2778f302015-04-02 13:22:31 +130038// The following is #included again to catch certain OS-specific functions
39// being used:
40#include "loader_platform.h"
41
Chris Forbes7f720542015-05-09 10:31:21 +120042#include "spirv/spirv.h"
Chris Forbes2778f302015-04-02 13:22:31 +130043
Chris Forbes2778f302015-04-02 13:22:31 +130044
Chris Forbesb6b8c462015-04-15 06:59:41 +120045static std::unordered_map<void *, VkLayerDispatchTable *> tableMap;
Chris Forbes7f963832015-05-29 14:55:18 +120046static VkBaseLayerObject *pCurObj;
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -060047static std::unordered_map<void *, VkLayerInstanceDispatchTable *> tableInstanceMap;
Chris Forbesb6b8c462015-04-15 06:59:41 +120048static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
Chris Forbes7f963832015-05-29 14:55:18 +120049// TODO : This can be much smarter, using separate locks for separate global data
50static int globalLockInitialized = 0;
51static loader_platform_thread_mutex globalLock;
Chris Forbes3b1c4212015-04-08 10:11:59 +120052
Chris Forbes3a5e99a2015-04-10 11:41:20 +120053
54static void
55build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
56{
57 unsigned int const *code = (unsigned int const *)&words[0];
58 size_t size = words.size();
59
60 unsigned word = 5;
61 while (word < size) {
62 unsigned opcode = code[word] & 0x0ffffu;
63 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
64
65 switch (opcode) {
66 case spv::OpTypeVoid:
67 case spv::OpTypeBool:
68 case spv::OpTypeInt:
69 case spv::OpTypeFloat:
70 case spv::OpTypeVector:
71 case spv::OpTypeMatrix:
72 case spv::OpTypeSampler:
73 case spv::OpTypeFilter:
74 case spv::OpTypeArray:
75 case spv::OpTypeRuntimeArray:
76 case spv::OpTypeStruct:
77 case spv::OpTypeOpaque:
78 case spv::OpTypePointer:
79 case spv::OpTypeFunction:
80 case spv::OpTypeEvent:
81 case spv::OpTypeDeviceEvent:
82 case spv::OpTypeReserveId:
83 case spv::OpTypeQueue:
84 case spv::OpTypePipe:
85 type_def_index[code[word+1]] = word;
86 break;
87
88 default:
89 /* We only care about type definitions */
90 break;
91 }
92
93 word += oplen;
94 }
95}
96
/* Per-shader state retained by the layer: the raw SPIR-V image plus an index
 * into it, built once at vkCreateShader time and consulted by the later
 * cross-stage / VI / CB validation passes. */
struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;
    /* false when the blob failed the magic/version check; most checks are skipped then */
    bool is_spirv;

    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
        type_def_index(),
        is_spirv(true) {

        /* A SPIR-V module starts with a 5-word header: magic, version, ... */
        if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                       "Shader is not SPIR-V, most checks will not be possible");
            is_spirv = false;
            return;
        }


        build_type_def_index(words, type_def_index);
    }
};
122
123
124static std::unordered_map<void *, shader_source *> shader_map;
125
126
Chris Forbesb6b8c462015-04-15 06:59:41 +1200127static void
128initLayer()
129{
130 const char *strOpt;
131 // initialize ShaderChecker options
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600132 getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportFlags);
Chris Forbesb6b8c462015-04-15 06:59:41 +1200133 g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);
134
135 if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
136 {
137 strOpt = getLayerOption("ShaderCheckerLogFilename");
138 if (strOpt)
139 {
140 g_logFile = fopen(strOpt, "w");
141 }
142 if (g_logFile == NULL)
143 g_logFile = stdout;
144 }
145}
146
147
Jon Ashburn8d1b0b52015-05-18 13:20:15 -0600148static VkLayerDispatchTable * initLayerTable(const VkBaseLayerObject *devw)
Chris Forbes2778f302015-04-02 13:22:31 +1300149{
150 VkLayerDispatchTable *pTable;
151
Jon Ashburn8d1b0b52015-05-18 13:20:15 -0600152 assert(devw);
153 std::unordered_map<void *, VkLayerDispatchTable *>::const_iterator it = tableMap.find((void *) devw->baseObject);
Chris Forbes2778f302015-04-02 13:22:31 +1300154 if (it == tableMap.end())
155 {
156 pTable = new VkLayerDispatchTable;
Jon Ashburn8d1b0b52015-05-18 13:20:15 -0600157 tableMap[(void *) devw->baseObject] = pTable;
Chris Forbes2778f302015-04-02 13:22:31 +1300158 } else
159 {
160 return it->second;
161 }
162
Jon Ashburn8fd08252015-05-28 16:25:02 -0600163 layer_initialize_dispatch_table(pTable, devw);
Chris Forbes2778f302015-04-02 13:22:31 +1300164
165 return pTable;
166}
167
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -0600168static VkLayerInstanceDispatchTable * initLayerInstanceTable(const VkBaseLayerObject *instw)
169{
170 VkLayerInstanceDispatchTable *pTable;
171
172 assert(instw);
173 std::unordered_map<void *, VkLayerInstanceDispatchTable *>::const_iterator it = tableInstanceMap.find((void *) instw->baseObject);
174 if (it == tableInstanceMap.end())
175 {
176 pTable = new VkLayerInstanceDispatchTable;
177 tableInstanceMap[(void *) instw->baseObject] = pTable;
178 } else
179 {
180 return it->second;
181 }
182
Jon Ashburn8fd08252015-05-28 16:25:02 -0600183 layer_init_instance_dispatch_table(pTable, instw);
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -0600184
185 return pTable;
186}
Chris Forbes2778f302015-04-02 13:22:31 +1300187
Tobin Ehlis5d9c2242015-04-17 08:55:13 -0600188#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600189static const VkExtensionProperties shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
190 {
191 VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
192 "ShaderChecker",
193 0x10,
194 "Sample layer: ShaderChecker",
195// 0,
196// NULL,
197 }
Chris Forbes2778f302015-04-02 13:22:31 +1300198};
199
Chris Forbes2778f302015-04-02 13:22:31 +1300200VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600201 VkExtensionInfoType infoType,
202 uint32_t extensionIndex,
203 size_t* pDataSize,
204 void* pData)
Chris Forbes2778f302015-04-02 13:22:31 +1300205{
Chris Forbes2778f302015-04-02 13:22:31 +1300206 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
Chris Forbes2778f302015-04-02 13:22:31 +1300207 uint32_t *count;
208
209 if (pDataSize == NULL)
210 return VK_ERROR_INVALID_POINTER;
211
212 switch (infoType) {
213 case VK_EXTENSION_INFO_TYPE_COUNT:
214 *pDataSize = sizeof(uint32_t);
215 if (pData == NULL)
216 return VK_SUCCESS;
217 count = (uint32_t *) pData;
218 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
219 break;
220 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
221 *pDataSize = sizeof(VkExtensionProperties);
222 if (pData == NULL)
223 return VK_SUCCESS;
224 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
225 return VK_ERROR_INVALID_VALUE;
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600226 memcpy((VkExtensionProperties *) pData, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));
Chris Forbes2778f302015-04-02 13:22:31 +1300227 break;
228 default:
229 return VK_ERROR_INVALID_VALUE;
230 };
231
232 return VK_SUCCESS;
233}
234
235
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200236static char const *
237storage_class_name(unsigned sc)
238{
239 switch (sc) {
Cody Northrop97e52d82015-04-20 14:09:40 -0600240 case spv::StorageClassInput: return "input";
241 case spv::StorageClassOutput: return "output";
242 case spv::StorageClassUniformConstant: return "const uniform";
243 case spv::StorageClassUniform: return "uniform";
244 case spv::StorageClassWorkgroupLocal: return "workgroup local";
245 case spv::StorageClassWorkgroupGlobal: return "workgroup global";
246 case spv::StorageClassPrivateGlobal: return "private global";
247 case spv::StorageClassFunction: return "function";
248 case spv::StorageClassGeneric: return "generic";
249 case spv::StorageClassPrivate: return "private";
250 case spv::StorageClassAtomicCounter: return "atomic counter";
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200251 default: return "unknown";
252 }
253}
254
255
/* Formats a human-readable description of SPIR-V type <id> `type` from module
 * `src` into `dst`, recursing through vector/matrix/array/pointer/struct
 * element types. Returns a pointer to the written null terminator so callers
 * (and recursive calls) can append.
 * NOTE(review): `dst` has no length bound -- callers pass 1024-byte stack
 * buffers; a deeply nested type could overflow. Fixing this requires a size
 * parameter, which would change the interface. */
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    /* type <id> not found in the index: emit a placeholder */
    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            /* code[2] = width in bits, code[3] = signedness */
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            /* code[2] = component type, code[3] = component count */
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            /* code[2] = storage class, code[3] = pointee type */
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                /* member type <id>s occupy words 2..oplen-1 */
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}
301
302
/* Structurally compares type <id> `a_type` in module `a` against `b_type` in
 * module `b`, recursing through composite types. When `b_arrayed` is set, b's
 * type is allowed (expected) to carry one extra level of OpTypeArray around
 * a's type -- used for per-vertex inputs of array-input stages (e.g. GS).
 * Returns true when the types match. */
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    /* unknown <id> on either side: cannot match */
    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false; /* structs cannot match if member counts differ */
                }

                /* member type <id>s start at word 2 of the OpTypeStruct */
                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;

    }
}
384
385
/* Looks up `id` in `map`; returns the mapped value, or `def` when absent. */
static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto entry = map.find(id);
    return entry == map.end() ? def : (int) entry->second;
}
395
396
/* One shader interface variable (an Input/Output OpVariable), as captured by
 * collect_interface_by_location. */
struct interface_var {
    uint32_t id;       /* SPIR-V result <id> of the variable */
    uint32_t type_id;  /* <id> of the variable's (pointer) type */
    /* TODO: collect the name, too? Isn't required to be present. */
};
402
403
/* Walks module `src` and collects every OpVariable whose storage class is
 * `sinterface` (Input or Output): variables with a Location decoration go
 * into `out` keyed by location; variables with a BuiltIn decoration go into
 * `builtins_out` keyed by builtin. Variables with neither draw a warning.
 * Relies on decorations appearing before the variables they decorate, which
 * SPIR-V's section ordering guarantees. */
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    /* first pass data: decoration target <id> -> decoration operand */
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    /* skip the 5-word module header */
    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        /* OpVariable: word+1 = type <id>, word+2 = result <id>, word+3 = storage class */
        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                       code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}
471
472
Chris Forbes2778f302015-04-02 13:22:31 +1300473VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
474 VkShader *pShader)
475{
Chris Forbes7f963832015-05-29 14:55:18 +1200476 loader_platform_thread_lock_mutex(&globalLock);
Chris Forbes2778f302015-04-02 13:22:31 +1300477 VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
478 VkResult res = pTable->CreateShader(device, pCreateInfo, pShader);
Chris Forbes3b1c4212015-04-08 10:11:59 +1200479
480 shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
Chris Forbes7f963832015-05-29 14:55:18 +1200481 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbes2778f302015-04-02 13:22:31 +1300482 return res;
483}
484
485
Chris Forbesee99b9b2015-05-25 11:13:22 +1200486static bool
Chris Forbes41002452015-04-08 10:19:16 +1200487validate_interface_between_stages(shader_source const *producer, char const *producer_name,
Chris Forbesf044ec92015-06-05 15:01:08 +1200488 shader_source const *consumer, char const *consumer_name,
489 bool consumer_arrayed_input)
Chris Forbes41002452015-04-08 10:19:16 +1200490{
491 std::map<uint32_t, interface_var> outputs;
492 std::map<uint32_t, interface_var> inputs;
493
494 std::map<uint32_t, interface_var> builtin_outputs;
495 std::map<uint32_t, interface_var> builtin_inputs;
496
Chris Forbes6b2ead62015-04-17 10:13:28 +1200497 char str[1024];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200498 bool pass = true;
Chris Forbes41002452015-04-08 10:19:16 +1200499
Cody Northrop97e52d82015-04-20 14:09:40 -0600500 collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
501 collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);
Chris Forbes41002452015-04-08 10:19:16 +1200502
503 auto a_it = outputs.begin();
504 auto b_it = inputs.begin();
505
506 /* maps sorted by key (location); walk them together to find mismatches */
David Pinedod8f83d82015-04-27 16:36:17 -0600507 while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
508 bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
509 bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
Chris Forbes62cc3fc2015-06-10 08:37:27 +1200510 auto a_first = a_at_end ? 0 : a_it->first;
511 auto b_first = b_at_end ? 0 : b_it->first;
David Pinedod8f83d82015-04-27 16:36:17 -0600512
513 if (b_at_end || a_first < b_first) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200514 sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
David Pinedod8f83d82015-04-27 16:36:17 -0600515 producer_name, a_first, consumer_name);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600516 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes41002452015-04-08 10:19:16 +1200517 a_it++;
518 }
David Pinedod8f83d82015-04-27 16:36:17 -0600519 else if (a_at_end || a_first > b_first) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200520 sprintf(str, "%s consumes input location %d which is not written by %s\n",
David Pinedod8f83d82015-04-27 16:36:17 -0600521 consumer_name, b_first, producer_name);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600522 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200523 pass = false;
Chris Forbes41002452015-04-08 10:19:16 +1200524 b_it++;
525 }
526 else {
Chris Forbesf044ec92015-06-05 15:01:08 +1200527 if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200528 /* OK! */
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200529 }
530 else {
531 char producer_type[1024];
532 char consumer_type[1024];
533 describe_type(producer_type, producer, a_it->second.type_id);
534 describe_type(consumer_type, consumer, b_it->second.type_id);
535
Chris Forbes6b2ead62015-04-17 10:13:28 +1200536 sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200537 producer_type, consumer_type);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600538 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200539 pass = false;
Chris Forbes3a5e99a2015-04-10 11:41:20 +1200540 }
Chris Forbes41002452015-04-08 10:19:16 +1200541 a_it++;
542 b_it++;
543 }
544 }
Chris Forbesee99b9b2015-05-25 11:13:22 +1200545
546 return pass;
Chris Forbes41002452015-04-08 10:19:16 +1200547}
548
549
/* Coarse classification buckets used to compare a VkFormat against the
 * fundamental type of a shader interface variable. */
enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};
556
557
/* Classifies a VkFormat into a FORMAT_TYPE bucket for comparison against the
 * shader-side classification from get_fundamental_type. Only SINT/UINT
 * formats are listed explicitly; everything else (UNORM, SNORM, SCALED,
 * SRGB, FLOAT, ...) reads as float in the shader, hence the default. */
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}
601
602
Chris Forbes156a1162015-05-04 14:04:06 +1200603/* characterizes a SPIR-V type appearing in an interface to a FF stage,
604 * for comparison to a VkFormat's characterization above. */
605static unsigned
606get_fundamental_type(shader_source const *src, unsigned type)
607{
608 auto type_def_it = src->type_def_index.find(type);
609
610 if (type_def_it == src->type_def_index.end()) {
611 return FORMAT_TYPE_UNDEFINED;
612 }
613
614 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
615 unsigned opcode = code[0] & 0x0ffffu;
616 switch (opcode) {
617 case spv::OpTypeInt:
618 return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
619 case spv::OpTypeFloat:
620 return FORMAT_TYPE_FLOAT;
621 case spv::OpTypeVector:
622 return get_fundamental_type(src, code[2]);
623 case spv::OpTypeMatrix:
624 return get_fundamental_type(src, code[2]);
625 case spv::OpTypeArray:
626 return get_fundamental_type(src, code[2]);
627 case spv::OpTypePointer:
628 return get_fundamental_type(src, code[3]);
629 default:
630 return FORMAT_TYPE_UNDEFINED;
631 }
632}
633
634
Chris Forbesee99b9b2015-05-25 11:13:22 +1200635static bool
Chris Forbes280ba2c2015-06-12 11:16:41 +1200636validate_vi_consistency(VkPipelineVertexInputCreateInfo const *vi)
637{
638 /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
639 * each binding should be specified only once.
640 */
641 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
642 char str[1024];
643 bool pass = true;
644
645 for (unsigned i = 0; i < vi->bindingCount; i++) {
646 auto desc = &vi->pVertexBindingDescriptions[i];
647 auto & binding = bindings[desc->binding];
648 if (binding) {
649 sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600650 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
Chris Forbes280ba2c2015-06-12 11:16:41 +1200651 pass = false;
652 }
653 else {
654 binding = desc;
655 }
656 }
657
658 return pass;
659}
660
661
/* Validates the pipeline's vertex-input attribute descriptions against the
 * vertex shader's input interface: every VS input location must be fed by an
 * attribute of a compatible fundamental type. Unconsumed attributes are only
 * a warning. `vi` may be NULL (no VI state). Returns true on pass. */
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    /* both maps are sorted by location; walk them in lockstep to pair up
     * attributes with VS inputs and spot gaps on either side */
    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            /* attribute with no matching VS input: harmless, warn only */
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            /* VS input with no attribute feeding it: error */
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}
723
724
Chris Forbesee99b9b2015-05-25 11:13:22 +1200725static bool
Chris Forbes3616b462015-04-08 10:37:20 +1200726validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
727{
728 std::map<uint32_t, interface_var> outputs;
729 std::map<uint32_t, interface_var> builtin_outputs;
Chris Forbes6b2ead62015-04-17 10:13:28 +1200730 char str[1024];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200731 bool pass = true;
Chris Forbes3616b462015-04-08 10:37:20 +1200732
733 /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */
734
Cody Northrop97e52d82015-04-20 14:09:40 -0600735 collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);
Chris Forbes3616b462015-04-08 10:37:20 +1200736
737 /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
738 * and all color attachment should be UNORM/SNORM/FLOAT.
739 */
740 if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
Chris Forbes3616b462015-04-08 10:37:20 +1200741 if (outputs.size()) {
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600742 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
Chris Forbes6b2ead62015-04-17 10:13:28 +1200743 "Should not have user-defined FS outputs when using broadcast");
Chris Forbesee99b9b2015-05-25 11:13:22 +1200744 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200745 }
746
Ian Elliott1cb62222015-04-17 11:05:04 -0600747 for (unsigned i = 0; i < cb->attachmentCount; i++) {
Chris Forbes3616b462015-04-08 10:37:20 +1200748 unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
749 if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600750 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
Chris Forbes6b2ead62015-04-17 10:13:28 +1200751 "CB format should not be SINT or UINT when using broadcast");
Chris Forbesee99b9b2015-05-25 11:13:22 +1200752 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200753 }
754 }
755
Chris Forbesee99b9b2015-05-25 11:13:22 +1200756 return pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200757 }
758
759 auto it = outputs.begin();
760 uint32_t attachment = 0;
761
762 /* Walk attachment list and outputs together -- this is a little overpowered since attachments
763 * are currently dense, but the parallel with matching between shader stages is nice.
764 */
765
Chris Forbesbf2b1d22015-05-05 11:34:14 +1200766 while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
scygan3a22ce92015-06-01 19:48:11 +0200767 if (attachment == cb->attachmentCount || ( it != outputs.end() && it->first < attachment)) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200768 sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600769 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes3616b462015-04-08 10:37:20 +1200770 it++;
771 }
772 else if (it == outputs.end() || it->first > attachment) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200773 sprintf(str, "Attachment %d not written by FS", attachment);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600774 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
Chris Forbes3616b462015-04-08 10:37:20 +1200775 attachment++;
Chris Forbesee99b9b2015-05-25 11:13:22 +1200776 pass = false;
Chris Forbes3616b462015-04-08 10:37:20 +1200777 }
778 else {
Chris Forbes46d31e52015-05-04 14:20:10 +1200779 unsigned output_type = get_fundamental_type(fs, it->second.type_id);
780 unsigned att_type = get_format_type(cb->pAttachments[attachment].format);
781
782 /* type checking */
783 if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
784 char fs_type[1024];
785 describe_type(fs_type, fs, it->second.type_id);
786 sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
787 attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600788 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200789 pass = false;
Chris Forbes46d31e52015-05-04 14:20:10 +1200790 }
791
Chris Forbes6b2ead62015-04-17 10:13:28 +1200792 /* OK! */
Chris Forbes3616b462015-04-08 10:37:20 +1200793 it++;
794 attachment++;
795 }
796 }
Chris Forbesee99b9b2015-05-25 11:13:22 +1200797
798 return pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200799}
800
801
/* Per-stage metadata consulted when validating the interface between two
 * adjacent pipeline shader stages. */
struct shader_stage_attributes {
    char const * const name;    /* human-readable stage name used in diagnostic messages */
    bool arrayed_input;         /* true if the stage's inputs are arrayed, one element per
                                 * incoming vertex (see shader_stage_attribs table below) */
};
806
807
/* Stage attribute table, indexed by VK_SHADER_STAGE_* value in pipeline order
 * (vertex .. fragment; compute is deliberately excluded). The `true` entries
 * mark stages whose input interface variables are arrayed per-vertex. */
static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};
816
817
Chris Forbes81874ba2015-06-04 20:23:00 +1200818static bool
819validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
Chris Forbes4175e6f2015-04-08 10:15:35 +1200820{
Chris Forbesf6800b52015-04-08 10:16:45 +1200821 /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
822 * before trying to do anything more: */
823
Chris Forbesf044ec92015-06-05 15:01:08 +1200824 shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1]; /* exclude CS */
825 memset(shaders, 0, sizeof(shaders));
Chris Forbesf6800b52015-04-08 10:16:45 +1200826 VkPipelineCbStateCreateInfo const *cb = 0;
827 VkPipelineVertexInputCreateInfo const *vi = 0;
Chris Forbes6b2ead62015-04-17 10:13:28 +1200828 char str[1024];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200829 bool pass = true;
Chris Forbesf6800b52015-04-08 10:16:45 +1200830
Chris Forbes7f963832015-05-29 14:55:18 +1200831 loader_platform_thread_lock_mutex(&globalLock);
832
Chris Forbesf6800b52015-04-08 10:16:45 +1200833 for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
834 if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
835 auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;
836
Chris Forbesf044ec92015-06-05 15:01:08 +1200837 if (shader_stage->shader.stage < VK_SHADER_STAGE_VERTEX || shader_stage->shader.stage > VK_SHADER_STAGE_FRAGMENT) {
Chris Forbes6b2ead62015-04-17 10:13:28 +1200838 sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
Courtney Goeltzenleuchterf579fa62015-06-10 17:39:03 -0600839 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
Chris Forbes6b2ead62015-04-17 10:13:28 +1200840 }
Chris Forbesf044ec92015-06-05 15:01:08 +1200841 else {
842 shaders[shader_stage->shader.stage] = shader_map[(void *)(shader_stage->shader.shader)];
843 }
Chris Forbesf6800b52015-04-08 10:16:45 +1200844 }
845 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
846 cb = (VkPipelineCbStateCreateInfo const *)stage;
847 }
848 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
849 vi = (VkPipelineVertexInputCreateInfo const *)stage;
850 }
851 }
852
Chris Forbes280ba2c2015-06-12 11:16:41 +1200853 if (vi) {
854 pass = validate_vi_consistency(vi) && pass;
855 }
856
Chris Forbesf044ec92015-06-05 15:01:08 +1200857 if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
858 pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
Chris Forbes772d03b2015-04-08 10:36:37 +1200859 }
860
Chris Forbesf044ec92015-06-05 15:01:08 +1200861 /* TODO: enforce rules about present combinations of shaders */
862 int producer = VK_SHADER_STAGE_VERTEX;
863 int consumer = VK_SHADER_STAGE_GEOMETRY;
864
865 while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
866 producer++;
867 consumer++;
Chris Forbes41002452015-04-08 10:19:16 +1200868 }
869
Tony Barbour0102a902015-06-11 15:04:25 -0600870 for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
Chris Forbesf044ec92015-06-05 15:01:08 +1200871 assert(shaders[producer]);
872 if (shaders[consumer]) {
873 if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
874 pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
875 shaders[consumer], shader_stage_attribs[consumer].name,
876 shader_stage_attribs[consumer].arrayed_input) && pass;
877 }
878
879 producer = consumer;
880 }
881 }
882
883 if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
884 pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
Chris Forbes3616b462015-04-08 10:37:20 +1200885 }
886
Chris Forbes7f963832015-05-29 14:55:18 +1200887 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbes81874ba2015-06-04 20:23:00 +1200888 return pass;
889}
890
891
Chris Forbes39d8d752015-06-04 20:27:09 +1200892VK_LAYER_EXPORT VkResult VKAPI
893vkCreateGraphicsPipeline(VkDevice device,
894 const VkGraphicsPipelineCreateInfo *pCreateInfo,
895 VkPipeline *pPipeline)
Chris Forbes81874ba2015-06-04 20:23:00 +1200896{
897 bool pass = validate_graphics_pipeline(pCreateInfo);
Chris Forbesee99b9b2015-05-25 11:13:22 +1200898
899 if (pass) {
900 /* The driver is allowed to crash if passed junk. Only actually create the
901 * pipeline if we didn't run into any showstoppers above.
902 */
Chris Forbes81874ba2015-06-04 20:23:00 +1200903 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
Chris Forbesee99b9b2015-05-25 11:13:22 +1200904 return pTable->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
905 }
906 else {
907 return VK_ERROR_UNKNOWN;
908 }
Chris Forbes4175e6f2015-04-08 10:15:35 +1200909}
910
911
Chris Forbes39d8d752015-06-04 20:27:09 +1200912VK_LAYER_EXPORT VkResult VKAPI
913vkCreateGraphicsPipelineDerivative(VkDevice device,
914 const VkGraphicsPipelineCreateInfo *pCreateInfo,
915 VkPipeline basePipeline,
916 VkPipeline *pPipeline)
917{
918 bool pass = validate_graphics_pipeline(pCreateInfo);
919
920 if (pass) {
921 /* The driver is allowed to crash if passed junk. Only actually create the
922 * pipeline if we didn't run into any showstoppers above.
923 */
924 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
925 return pTable->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
926 }
927 else {
928 return VK_ERROR_UNKNOWN;
929 }
930}
931
932
Jon Ashburn9a8a2e22015-05-19 16:34:53 -0600933/* hook DextroyDevice to remove tableMap entry */
934VK_LAYER_EXPORT VkResult VKAPI vkDestroyDevice(VkDevice device)
935{
936 VkLayerDispatchTable *pTable = tableMap[(VkBaseLayerObject *)device];
937 VkResult res = pTable->DestroyDevice(device);
938 tableMap.erase(device);
939 return res;
940}
941
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -0600942VkResult VKAPI vkCreateInstance(
943 const VkInstanceCreateInfo* pCreateInfo,
944 VkInstance* pInstance)
945{
946
947 loader_platform_thread_once(&g_initOnce, initLayer);
948 /*
949 * For layers, the pInstance has already been filled out
950 * by the loader so that dispatch table is available.
951 */
952 VkLayerInstanceDispatchTable *pTable = initLayerInstanceTable((const VkBaseLayerObject *) (*pInstance));
953
954 VkResult result = pTable->CreateInstance(pCreateInfo, pInstance);
955
956 if (result == VK_SUCCESS) {
957 enable_debug_report(pCreateInfo->extensionCount, pCreateInfo->pEnabledExtensions);
Courtney Goeltzenleuchterd02a9642015-06-08 14:58:39 -0600958
959 debug_report_init_instance_extension_dispatch_table(
960 pTable,
961 pTable->GetInstanceProcAddr,
962 *pInstance);
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -0600963 }
964 return result;
965}
966
Jon Ashburn9a8a2e22015-05-19 16:34:53 -0600967/* hook DestroyInstance to remove tableInstanceMap entry */
968VK_LAYER_EXPORT VkResult VKAPI vkDestroyInstance(VkInstance instance)
969{
970 VkLayerInstanceDispatchTable *pTable = tableInstanceMap[(VkBaseLayerObject *)instance];
971 VkResult res = pTable->DestroyInstance(instance);
972 tableInstanceMap.erase(instance);
973 return res;
974}
Chris Forbesdb467bd2015-05-25 11:12:59 +1200975
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -0600976VK_LAYER_EXPORT VkResult VKAPI vkDbgCreateMsgCallback(
977 VkInstance instance,
978 VkFlags msgFlags,
979 const PFN_vkDbgMsgCallback pfnMsgCallback,
980 void* pUserData,
981 VkDbgMsgCallback* pMsgCallback)
982{
983 VkLayerInstanceDispatchTable *pTable = tableInstanceMap[(VkBaseLayerObject *)instance];
984 return layer_create_msg_callback(instance, pTable, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
985}
986
987VK_LAYER_EXPORT VkResult VKAPI vkDbgDestroyMsgCallback(
988 VkInstance instance,
989 VkDbgMsgCallback msgCallback)
990{
991 VkLayerInstanceDispatchTable *pTable = tableInstanceMap[(VkBaseLayerObject *)instance];
992 return layer_destroy_msg_callback(instance, pTable, msgCallback);
993}
994
Jon Ashburn8d1b0b52015-05-18 13:20:15 -0600995VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice device, const char* pName)
Chris Forbes2778f302015-04-02 13:22:31 +1300996{
Jon Ashburn8d1b0b52015-05-18 13:20:15 -0600997 if (device == NULL)
Chris Forbes2778f302015-04-02 13:22:31 +1300998 return NULL;
999
Chris Forbesb6b8c462015-04-15 06:59:41 +12001000 loader_platform_thread_once(&g_initOnce, initLayer);
1001
Jon Ashburn8fd08252015-05-28 16:25:02 -06001002 /* loader uses this to force layer initialization; device object is wrapped */
1003 if (!strcmp("vkGetDeviceProcAddr", pName)) {
1004 initLayerTable((const VkBaseLayerObject *) device);
1005 return (void *) vkGetDeviceProcAddr;
1006 }
1007
Chris Forbes2778f302015-04-02 13:22:31 +13001008#define ADD_HOOK(fn) \
1009 if (!strncmp(#fn, pName, sizeof(#fn))) \
1010 return (void *) fn
1011
Chris Forbes2778f302015-04-02 13:22:31 +13001012 ADD_HOOK(vkCreateShader);
Jon Ashburn9a8a2e22015-05-19 16:34:53 -06001013 ADD_HOOK(vkDestroyDevice);
Chris Forbes4175e6f2015-04-08 10:15:35 +12001014 ADD_HOOK(vkCreateGraphicsPipeline);
Chris Forbes39d8d752015-06-04 20:27:09 +12001015 ADD_HOOK(vkCreateGraphicsPipelineDerivative);
Jon Ashburne59f84f2015-05-18 09:08:41 -06001016#undef ADD_HOOK
Jon Ashburn8fd08252015-05-28 16:25:02 -06001017 VkLayerDispatchTable* pTable = tableMap[(VkBaseLayerObject *)device];
1018 if (pTable->GetDeviceProcAddr == NULL)
Chris Forbes2778f302015-04-02 13:22:31 +13001019 return NULL;
Jon Ashburn8fd08252015-05-28 16:25:02 -06001020 return pTable->GetDeviceProcAddr(device, pName);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001021}
1022
1023VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
1024{
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -06001025 void *fptr;
1026
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001027 if (inst == NULL)
1028 return NULL;
1029
Jon Ashburn8c5cbcf2015-05-07 10:27:37 -06001030 loader_platform_thread_once(&g_initOnce, initLayer);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001031
Jon Ashburn8fd08252015-05-28 16:25:02 -06001032 if (!strcmp("vkGetInstanceProcAddr", pName)) {
1033 initLayerInstanceTable((const VkBaseLayerObject *) inst);
1034 return (void *) vkGetInstanceProcAddr;
1035 }
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001036#define ADD_HOOK(fn) \
1037 if (!strncmp(#fn, pName, sizeof(#fn))) \
1038 return (void *) fn
1039
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -06001040 ADD_HOOK(vkCreateInstance);
Jon Ashburn9a8a2e22015-05-19 16:34:53 -06001041 ADD_HOOK(vkDestroyInstance);
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001042 ADD_HOOK(vkGetGlobalExtensionInfo);
Jon Ashburne59f84f2015-05-18 09:08:41 -06001043#undef ADD_HOOK
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001044
Courtney Goeltzenleuchterbfd2c662015-06-01 14:46:33 -06001045 fptr = msg_callback_get_proc_addr(pName);
1046 if (fptr)
1047 return fptr;
1048
Jon Ashburn8fd08252015-05-28 16:25:02 -06001049 VkLayerInstanceDispatchTable* pTable = tableInstanceMap[(VkBaseLayerObject *) inst];
1050 if (pTable->GetInstanceProcAddr == NULL)
Jon Ashburnf6b33db2015-05-05 14:22:52 -06001051 return NULL;
Jon Ashburn8fd08252015-05-28 16:25:02 -06001052 return pTable->GetInstanceProcAddr(inst, pName);
Chris Forbes2778f302015-04-02 13:22:31 +13001053}