/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include <string>
#include "loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vk_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_msg.h"
#include "vk_layer_table.h"
#include "vk_enum_string_helper.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "loader_platform.h"

#include "spirv/spirv.h"


static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO: This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;

static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
        case spv::OpTypeVoid:
        case spv::OpTypeBool:
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeSampler:
        case spv::OpTypeFilter:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeStruct:
        case spv::OpTypeOpaque:
        case spv::OpTypePointer:
        case spv::OpTypeFunction:
        case spv::OpTypeEvent:
        case spv::OpTypeDeviceEvent:
        case spv::OpTypeReserveId:
        case spv::OpTypeQueue:
        case spv::OpTypePipe:
            type_def_index[code[word+1]] = word;
            break;

        default:
            /* We only care about type definitions */
            break;
        }

        word += oplen;
    }
}
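
/*
 * Sketch of the word layout the loop above assumes (illustration only, not
 * used by the layer): a SPIR-V module begins with a 5-word header (magic,
 * version, generator, bound, schema), and each subsequent instruction packs
 * its total word count into the high 16 bits of its first word and its
 * opcode into the low 16 bits. A hypothetical type definition such as
 *
 *     %7 = OpTypeFloat 32        ; 3 words: (3 << 16) | opcode, 7, 32
 *
 * therefore makes type_def_index[7] record the offset of that first word,
 * and the cursor then advances by oplen (3) to the next instruction.
 */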

struct shader_module {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;
    bool is_spirv;

    shader_module(VkShaderModuleCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
        type_def_index(),
        is_spirv(true) {

        if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                       "Shader is not SPIR-V, most checks will not be possible");
            is_spirv = false;
            return;
        }

        build_type_def_index(words, type_def_index);
    }
};


static std::unordered_map<void *, shader_module *> shader_module_map;

struct shader_object {
    std::string name;
    struct shader_module *module;

    shader_object(VkShaderCreateInfo const *pCreateInfo)
    {
        module = shader_module_map[pCreateInfo->module];
        name = pCreateInfo->pName;
    }
};
static std::unordered_map<void *, shader_object *> shader_object_map;
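
/* Bookkeeping sketch: vkCreateShaderModule records one shader_module per
 * VkShaderModule handle in shader_module_map, and vkCreateShader records one
 * shader_object per VkShader handle in shader_object_map, pointing back at
 * its module. Pipeline validation later walks VkShader -> shader_object ->
 * shader_module to reach the parsed SPIR-V words. The handles serve purely
 * as opaque map keys; this assumes each created object has a unique, stable
 * pointer value for as long as its map entry lives.
 */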


static void
initLayer()
{
    const char *strOpt;
    // initialize ShaderChecker options
    getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportFlags);
    g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);

    if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        strOpt = getLayerOption("ShaderCheckerLogFilename");
        if (strOpt)
        {
            g_logFile = fopen(strOpt, "w");
        }
        if (g_logFile == NULL)
            g_logFile = stdout;
    }
}

#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
static const VkExtensionProperties shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    {
        VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
        "ShaderChecker",
        0x10,
        "Sample layer: ShaderChecker",
    },
    {
        VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
        "Validation",
        0x10,
        "Sample layer: ShaderChecker",
    }
};

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionCount(
        uint32_t* pCount)
{
    *pCount = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionProperties(
        uint32_t extensionIndex,
        VkExtensionProperties* pProperties)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */

    if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
        return VK_ERROR_INVALID_VALUE;
    memcpy(pProperties, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));

    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionCount(
        VkPhysicalDevice gpu,
        uint32_t* pCount)
{
    *pCount = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionProperties(
        VkPhysicalDevice gpu,
        uint32_t extensionIndex,
        VkExtensionProperties* pProperties)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */

    if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
        return VK_ERROR_INVALID_VALUE;
    memcpy(pProperties, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));

    return VK_SUCCESS;
}

static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
    case spv::StorageClassInput: return "input";
    case spv::StorageClassOutput: return "output";
    case spv::StorageClassUniformConstant: return "const uniform";
    case spv::StorageClassUniform: return "uniform";
    case spv::StorageClassWorkgroupLocal: return "workgroup local";
    case spv::StorageClassWorkgroupGlobal: return "workgroup global";
    case spv::StorageClassPrivateGlobal: return "private global";
    case spv::StorageClassFunction: return "function";
    case spv::StorageClassGeneric: return "generic";
    case spv::StorageClassPrivate: return "private";
    case spv::StorageClassAtomicCounter: return "atomic counter";
    default: return "unknown";
    }
}


/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_module const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}
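
/* Example of the strings describe_type produces (illustrative; the actual ids
 * depend on the module): for a fragment shader input declared as `in vec4 c`,
 * the pointer type typically renders as
 *
 *     ptr to input vec4 of float32
 *
 * and a struct such as `struct { float x; int y; }` renders as
 *     struct of (float32, sint32)
 * These strings are only used to build the mismatch messages below.
 */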


static bool
types_match(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false;   /* structs cannot match if member counts differ */
                }

                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;
    }
}
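
/* Worked example of the b_arrayed path (a sketch, in GLSL-style terms): a
 * vertex shader output `vec4 v` consumed by a geometry shader as `vec4 v[]`
 * carries one extra array level on the consumer side. Calling
 * types_match(vs, gs, <vec4>, <vec4[]>, true) hits the OpTypeArray branch,
 * strips that single level, and then compares vec4 against vec4 with
 * b_arrayed = false, so per-vertex arrayed inputs still match their
 * non-arrayed producers.
 */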


static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};


static void
collect_interface_by_location(shader_module const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}
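
/* Decoration/variable pairing, by example (illustration only): for SPIR-V
 * along the lines of
 *
 *     OpDecorate %9 Location 1
 *     %9 = OpVariable %ptr_Input_v4float Input
 *
 * the OpDecorate pass stores var_locations[9] = 1, and the OpVariable pass
 * (result type at word+1, result id at word+2, storage class at word+3)
 * then files an interface_var under out[1]. Variables decorated BuiltIn
 * instead land in builtins_out, keyed by the builtin number.
 */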


VK_LAYER_EXPORT VkResult VKAPI vkCreateShaderModule(
        VkDevice device,
        const VkShaderModuleCreateInfo *pCreateInfo,
        VkShaderModule *pShaderModule)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkResult res = device_dispatch_table(device)->CreateShaderModule(device, pCreateInfo, pShaderModule);

    shader_module_map[(VkBaseLayerObject *) *pShaderModule] = new shader_module(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(
        VkDevice device,
        const VkShaderCreateInfo *pCreateInfo,
        VkShader *pShader)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkResult res = device_dispatch_table(device)->CreateShader(device, pCreateInfo, pShader);

    shader_object_map[(VkBaseLayerObject *) *pShader] = new shader_object(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

static bool
validate_interface_between_stages(shader_module const *producer, char const *producer_name,
                                  shader_module const *consumer, char const *consumer_name,
                                  bool consumer_arrayed_input)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() > 0 && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                    producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                    consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                        producer_type, consumer_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}
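
/* Merge-walk example (hypothetical locations): if the producer writes
 * locations {0, 1, 3} and the consumer reads {1, 2, 3}, the walk above warns
 * that location 0 is written but never consumed, flags location 2 as consumed
 * but never written (failing the pipeline), and type-checks the shared
 * locations 1 and 3. Both maps are ordered by location, so a single forward
 * pass over each is sufficient.
 */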


enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT,  /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};


static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}


/* characterizes a SPIR-V type appearing in an interface to a FF stage,
 * for comparison to a VkFormat's characterization above. */
static unsigned
get_fundamental_type(shader_module const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return FORMAT_TYPE_UNDEFINED;
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeInt:
            return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        case spv::OpTypeVector:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeMatrix:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeArray:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypePointer:
            return get_fundamental_type(src, code[3]);
        default:
            return FORMAT_TYPE_UNDEFINED;
    }
}


static bool
validate_vi_consistency(VkPipelineVertexInputStateCreateInfo const *vi)
{
    /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
     * each binding should be specified only once.
     */
    std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
    char str[1024];
    bool pass = true;

    for (unsigned i = 0; i < vi->bindingCount; i++) {
        auto desc = &vi->pVertexBindingDescriptions[i];
        auto & binding = bindings[desc->binding];
        if (binding) {
            sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
            pass = false;
        }
        else {
            binding = desc;
        }
    }

    return pass;
}
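
/* Example of what this catches (hypothetical state): two
 * VkVertexInputBindingDescription entries that both claim binding 0, say one
 * with stride 16 and one with stride 32, produce a
 * SHADER_CHECKER_INCONSISTENT_VI error for binding 0, since the later
 * description would otherwise silently shadow the earlier one.
 */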


static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputStateCreateInfo const *vi, shader_module const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}
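
/* Example mismatch this check reports (hypothetical pipeline state): a vertex
 * attribute declared with VK_FORMAT_R32G32B32A32_SFLOAT at location 2 feeding
 * a VS input declared as `ivec4` yields FORMAT_TYPE_FLOAT vs FORMAT_TYPE_SINT
 * and so a SHADER_CHECKER_INTERFACE_TYPE_MISMATCH error; an attribute with no
 * matching VS input only warns, while a VS input with no attribute is an
 * error.
 */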


static bool
validate_fs_outputs_against_cb(shader_module const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];
    bool pass = true;

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachments should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        if (outputs.size()) {
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            pass = false;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                pass = false;
            }
        }

        return pass;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || (it != outputs.end() && it->first < attachment)) {
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
            pass = false;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it++;
            attachment++;
        }
    }

    return pass;
}
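
/* Two illustrative outcomes of this check (hypothetical state): a fragment
 * shader that writes the legacy gl_FragColor builtin while also declaring its
 * own output variables triggers SHADER_CHECKER_FS_MIXED_BROADCAST, and a
 * fragment shader writing a `vec4` to location 0 bound to a UINT color
 * attachment format triggers SHADER_CHECKER_INTERFACE_TYPE_MISMATCH in the
 * per-attachment walk.
 */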


struct shader_stage_attributes {
    char const * const name;
    bool arrayed_input;
};


static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};


static bool
validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
{
    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_module const *shaders[VK_SHADER_STAGE_FRAGMENT + 1];  /* exclude CS */
    memset(shaders, 0, sizeof(shaders));
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputStateCreateInfo const *vi = 0;
    char str[1024];
    bool pass = true;

    loader_platform_thread_lock_mutex(&globalLock);

    for (auto i = 0; i < pCreateInfo->stageCount; i++) {
        VkPipelineShaderStageCreateInfo const *pStage = &pCreateInfo->pStages[i];
        if (pStage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {

            if (pStage->stage < VK_SHADER_STAGE_VERTEX || pStage->stage > VK_SHADER_STAGE_FRAGMENT) {
                sprintf(str, "Unknown shader stage %d\n", pStage->stage);
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
            }
            else {
                struct shader_object *shader = shader_object_map[(void *) pStage->shader];
                shaders[pStage->stage] = shader->module;
            }
        }
    }

    cb = pCreateInfo->pCbState;
    vi = pCreateInfo->pVertexInputState;

    if (vi) {
        pass = validate_vi_consistency(vi) && pass;
    }

    if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
        pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
    }

    /* TODO: enforce rules about present combinations of shaders */
    int producer = VK_SHADER_STAGE_VERTEX;
    int consumer = VK_SHADER_STAGE_GEOMETRY;

    while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
        producer++;
        consumer++;
    }

    for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
                pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
                                                         shaders[consumer], shader_stage_attribs[consumer].name,
                                                         shader_stage_attribs[consumer].arrayed_input) && pass;
            }

            producer = consumer;
        }
    }

    if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
        pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
    }

    loader_platform_thread_unlock_mutex(&globalLock);
    return pass;
}
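
/* Stage-chaining sketch: with only a VS and FS bound, the producer/consumer
 * scan above starts at the vertex shader, skips consumer stages that are not
 * bound, and ends up validating the single VS -> FS interface; with a
 * geometry shader also present it validates VS -> GS and then GS -> FS,
 * passing each consumer's arrayed_input flag so per-vertex arrayed inputs
 * compare correctly.
 */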


VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipeline(VkDevice device,
                         const VkGraphicsPipelineCreateInfo *pCreateInfo,
                         VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        return device_dispatch_table(device)->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}


VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipelineDerivative(VkDevice device,
                                   const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                   VkPipeline basePipeline,
                                   VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        return device_dispatch_table(device)->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}


/* hook DestroyDevice to remove tableMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyDevice(VkDevice device)
{
    dispatch_key key = get_dispatch_key(device);
    VkResult res = device_dispatch_table(device)->DestroyDevice(device);
    destroy_device_dispatch_table(key);
    return res;
}

VkResult VKAPI vkCreateInstance(
        const VkInstanceCreateInfo* pCreateInfo,
        VkInstance* pInstance)
{
    loader_platform_thread_once(&g_initOnce, initLayer);
    /*
     * For layers, the pInstance has already been filled out
     * by the loader so that the dispatch table is available.
     */
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(*pInstance);

    VkResult result = pTable->CreateInstance(pCreateInfo, pInstance);

    if (result == VK_SUCCESS) {
        enable_debug_report(pCreateInfo->extensionCount, pCreateInfo->pEnabledExtensions);

        debug_report_init_instance_extension_dispatch_table(
                    pTable,
                    pTable->GetInstanceProcAddr,
                    *pInstance);
    }
    return result;
}

/* hook DestroyInstance to remove tableInstanceMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyInstance(VkInstance instance)
{
    dispatch_key key = get_dispatch_key(instance);
    VkResult res = instance_dispatch_table(instance)->DestroyInstance(instance);
    destroy_instance_dispatch_table(key);
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgCreateMsgCallback(
        VkInstance instance,
        VkFlags msgFlags,
        const PFN_vkDbgMsgCallback pfnMsgCallback,
        void* pUserData,
        VkDbgMsgCallback* pMsgCallback)
{
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
    return layer_create_msg_callback(instance, pTable, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgDestroyMsgCallback(
        VkInstance instance,
        VkDbgMsgCallback msgCallback)
{
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
    return layer_destroy_msg_callback(instance, pTable, msgCallback);
}

VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice device, const char* pName)
{
    if (device == NULL)
        return NULL;

    loader_platform_thread_once(&g_initOnce, initLayer);

    /* loader uses this to force layer initialization; device object is wrapped */
    if (!strcmp("vkGetDeviceProcAddr", pName)) {
        initDeviceTable((const VkBaseLayerObject *) device);
        return (void *) vkGetDeviceProcAddr;
    }

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateShaderModule);
    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkDestroyDevice);
    ADD_HOOK(vkCreateGraphicsPipeline);
    ADD_HOOK(vkCreateGraphicsPipelineDerivative);
#undef ADD_HOOK

    VkLayerDispatchTable* pTable = device_dispatch_table(device);
    if (pTable->GetDeviceProcAddr == NULL)
        return NULL;
    return pTable->GetDeviceProcAddr(device, pName);
}

VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
{
    void *fptr;

    if (inst == NULL)
        return NULL;

    loader_platform_thread_once(&g_initOnce, initLayer);

    if (!strcmp("vkGetInstanceProcAddr", pName)) {
        initInstanceTable((const VkBaseLayerObject *) inst);
        return (void *) vkGetInstanceProcAddr;
    }

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateInstance);
    ADD_HOOK(vkDestroyInstance);
    ADD_HOOK(vkGetGlobalExtensionProperties);
    ADD_HOOK(vkGetGlobalExtensionCount);
    ADD_HOOK(vkGetPhysicalDeviceExtensionProperties);
    ADD_HOOK(vkGetPhysicalDeviceExtensionCount);
#undef ADD_HOOK

    fptr = msg_callback_get_proc_addr(pName);
    if (fptr)
        return fptr;

    VkLayerInstanceDispatchTable* pTable = instance_dispatch_table(inst);
    if (pTable->GetInstanceProcAddr == NULL)
        return NULL;
    return pTable->GetInstanceProcAddr(inst, pName);
}