/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include "loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vkLayer.h"
#include "layers_config.h"
#include "layers_msg.h"
#include "layers_table.h"
#include "vk_enum_string_helper.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "loader_platform.h"

#include "spirv/spirv.h"

static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;

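/* Build an index from each type <id> to the offset of its defining instruction.
 *
 * A SPIR-V module is a stream of 32-bit words: a 5-word header (magic, version,
 * generator, bound, schema) followed by instructions. Each instruction packs its
 * word count into the high 16 bits of its first word and its opcode into the low
 * 16 bits; for the OpType* instructions indexed here, word+1 holds the result <id>
 * being defined. For example, a module might contain (roughly):
 *
 *     %2 = OpTypeFloat 32          ; type_def_index[2] -> offset of this instruction
 *     %3 = OpTypeVector %2 4       ; type_def_index[3] -> offset of this instruction
 *
 * which lets later passes jump straight from an <id> to its definition.
 */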
static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
        case spv::OpTypeVoid:
        case spv::OpTypeBool:
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeSampler:
        case spv::OpTypeFilter:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeStruct:
        case spv::OpTypeOpaque:
        case spv::OpTypePointer:
        case spv::OpTypeFunction:
        case spv::OpTypeEvent:
        case spv::OpTypeDeviceEvent:
        case spv::OpTypeReserveId:
        case spv::OpTypeQueue:
        case spv::OpTypePipe:
            type_def_index[code[word+1]] = word;
            break;

        default:
            /* We only care about type definitions */
            break;
        }

        word += oplen;
    }
}

struct shader_source {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;
    bool is_spirv;

    shader_source(VkShaderCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
        type_def_index(),
        is_spirv(true) {

        if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                       "Shader is not SPIR-V, most checks will not be possible");
            is_spirv = false;
            return;
        }

        build_type_def_index(words, type_def_index);
    }
};

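/* All shader modules seen by vkCreateShader, keyed by the returned VkShader handle.
 * Pipeline-time checks look shaders up here; access is guarded by globalLock.
 */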
static std::unordered_map<void *, shader_source *> shader_map;

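/* Read the layer's settings (report flags, debug action, optional log file) from the
 * standard layer configuration mechanism. Runs once, via g_initOnce.
 */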
static void
initLayer()
{
    const char *strOpt;
    // initialize ShaderChecker options
    getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportFlags);
    g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);

    if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        strOpt = getLayerOption("ShaderCheckerLogFilename");
        if (strOpt)
        {
            g_logFile = fopen(strOpt, "w");
        }
        if (g_logFile == NULL)
            g_logFile = stdout;
    }
}

#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
static const VkExtensionProperties shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
    {
        VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
        "ShaderChecker",
        0x10,
        "Sample layer: ShaderChecker",
    },
    {
        VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
        "Validation",
        0x10,
        "Sample layer: ShaderChecker",
    }
};

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionCount(
        uint32_t* pCount)
{
    *pCount = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionProperties(
        uint32_t extensionIndex,
        VkExtensionProperties* pProperties)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */

    if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
        return VK_ERROR_INVALID_VALUE;
    memcpy(pProperties, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));

    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionCount(
        VkPhysicalDevice gpu,
        uint32_t* pCount)
{
    *pCount = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
    return VK_SUCCESS;
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionProperties(
        VkPhysicalDevice gpu,
        uint32_t extensionIndex,
        VkExtensionProperties* pProperties)
{
    /* This entrypoint is NOT going to init its own dispatch table since loader calls here early */

    if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
        return VK_ERROR_INVALID_VALUE;
    memcpy(pProperties, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));

    return VK_SUCCESS;
}

static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
    case spv::StorageClassInput: return "input";
    case spv::StorageClassOutput: return "output";
    case spv::StorageClassUniformConstant: return "const uniform";
    case spv::StorageClassUniform: return "uniform";
    case spv::StorageClassWorkgroupLocal: return "workgroup local";
    case spv::StorageClassWorkgroupGlobal: return "workgroup global";
    case spv::StorageClassPrivateGlobal: return "private global";
    case spv::StorageClassFunction: return "function";
    case spv::StorageClassGeneric: return "generic";
    case spv::StorageClassPrivate: return "private";
    case spv::StorageClassAtomicCounter: return "atomic counter";
    default: return "unknown";
    }
}

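/* Render a human-readable description of a SPIR-V type into dst, recursing through
 * composite types. For example, an Input pointer to a 4-component vector of 32-bit
 * floats comes out as "ptr to input vec4 of float32"; unknown <id>s render as "undef".
 */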
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
            {
                unsigned oplen = code[0] >> 16;
                dst += sprintf(dst, "struct of (");
                for (unsigned i = 2; i < oplen; i++) {
                    dst = describe_type(dst, src, code[i]);
                    dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
                }
                return dst;
            }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}

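/* Compare a type in shader 'a' against a type in shader 'b' by walking both type trees
 * in parallel. When b_arrayed is set, the consumer stage's inputs carry an extra outer
 * array level (e.g. a geometry shader input is an array over vertices), so one level of
 * OpTypeArray on the 'b' side is peeled off before requiring an exact match.
 */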
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false;   /* structs cannot match if member counts differ */
                }

                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;
    }
}

static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};

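/* Collect every OpVariable in the given storage class, keyed by its Location decoration
 * (user-defined interface variables go into 'out') or by its BuiltIn decoration (into
 * 'builtins_out'). Variables with neither decoration are reported as inconsistent SPIR-V.
 */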
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}

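/* Intercept shader creation so the SPIR-V words can be stashed in shader_map, keyed by
 * the new VkShader handle, for later pipeline-time interface checking. The downstream
 * call and the map update both happen under globalLock.
 */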
VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
                                              VkShader *pShader)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkResult res = device_dispatch_table(device)->CreateShader(device, pCreateInfo, pShader);

    shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

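/* Check the output interface of one shader stage against the input interface of the
 * next. Outputs and inputs are gathered by location and walked together: an unconsumed
 * output is only a warning, while a missing or type-mismatched input is an error and
 * fails the pipeline.
 */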
static bool
validate_interface_between_stages(shader_source const *producer, char const *producer_name,
                                  shader_source const *consumer, char const *consumer_name,
                                  bool consumer_arrayed_input)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
                    producer_name, a_first, consumer_name);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            sprintf(str, "%s consumes input location %d which is not written by %s\n",
                    consumer_name, b_first, producer_name);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            pass = false;
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
                        producer_type, consumer_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}

enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT,  /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};


static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}

/* characterizes a SPIR-V type appearing in an interface to a FF stage,
 * for comparison to a VkFormat's characterization above. */
static unsigned
get_fundamental_type(shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return FORMAT_TYPE_UNDEFINED;
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeInt:
            return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
        case spv::OpTypeFloat:
            return FORMAT_TYPE_FLOAT;
        case spv::OpTypeVector:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeMatrix:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypeArray:
            return get_fundamental_type(src, code[2]);
        case spv::OpTypePointer:
            return get_fundamental_type(src, code[3]);
        default:
            return FORMAT_TYPE_UNDEFINED;
    }
}

static bool
validate_vi_consistency(VkPipelineVertexInputStateCreateInfo const *vi)
{
    /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
     * each binding should be specified only once.
     */
    std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
    char str[1024];
    bool pass = true;

    for (unsigned i = 0; i < vi->bindingCount; i++) {
        auto desc = &vi->pVertexBindingDescriptions[i];
        auto & binding = bindings[desc->binding];
        if (binding) {
            sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
            pass = false;
        }
        else {
            binding = desc;
        }
    }

    return pass;
}

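/* Match the pipeline's vertex input attribute descriptions against the vertex shader's
 * input variables, walking both sets in location order. Unused attributes only warn,
 * VS inputs with no attribute are errors, and the attribute's VkFormat class
 * (float/int/uint) must agree with the fundamental type of the VS input.
 */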
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputStateCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}

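/* Match fragment shader outputs against the color blend state's attachments. The legacy
 * gl_FragColor broadcast case is handled first (no user-defined outputs allowed, and no
 * integer attachments); otherwise outputs and attachments are walked together by index
 * with the same warn/error rules as the other interface checks.
 */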
static bool
validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    char str[1024];
    bool pass = true;

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachments should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        if (outputs.size()) {
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                       "Should not have user-defined FS outputs when using broadcast");
            pass = false;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                           "CB format should not be SINT or UINT when using broadcast");
                pass = false;
            }
        }

        return pass;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || (it != outputs.end() && it->first < attachment)) {
            sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            sprintf(str, "Attachment %d not written by FS", attachment);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            attachment++;
            pass = false;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it++;
            attachment++;
        }
    }

    return pass;
}

struct shader_stage_attributes {
    char const * const name;
    bool arrayed_input;
};


static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};

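/* Run all pipeline-time checks on a graphics pipeline create info, under globalLock:
 * collect the shader for each stage from shader_map, validate the vertex input state
 * for internal consistency and against the VS, validate each producer/consumer stage
 * pair, and finally validate the FS against the color blend state. Returns false if
 * any check found a hard error.
 */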
static bool
validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
{
    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1];  /* exclude CS */
    memset(shaders, 0, sizeof(shaders));
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputStateCreateInfo const *vi = 0;
    char str[1024];
    bool pass = true;

    loader_platform_thread_lock_mutex(&globalLock);

    for (auto i = 0; i < pCreateInfo->stageCount; i++) {
        VkPipelineShaderStageCreateInfo const *pStage = &pCreateInfo->pStages[i];
        if (pStage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {

            if (pStage->stage < VK_SHADER_STAGE_VERTEX || pStage->stage > VK_SHADER_STAGE_FRAGMENT) {
                sprintf(str, "Unknown shader stage %d\n", pStage->stage);
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
            }
            else {
                shaders[pStage->stage] = shader_map[(void *)(pStage->shader)];
            }
        }
    }

    cb = pCreateInfo->pCbState;
    vi = pCreateInfo->pVertexInputState;

    if (vi) {
        pass = validate_vi_consistency(vi) && pass;
    }

    if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
        pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
    }

    /* TODO: enforce rules about present combinations of shaders */
    int producer = VK_SHADER_STAGE_VERTEX;
    int consumer = VK_SHADER_STAGE_GEOMETRY;

    while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
        producer++;
        consumer++;
    }

    for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
                pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
                                                         shaders[consumer], shader_stage_attribs[consumer].name,
                                                         shader_stage_attribs[consumer].arrayed_input) && pass;
            }

            producer = consumer;
        }
    }

    if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
        pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
    }

    loader_platform_thread_unlock_mutex(&globalLock);
    return pass;
}

VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipeline(VkDevice device,
                         const VkGraphicsPipelineCreateInfo *pCreateInfo,
                         VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        return device_dispatch_table(device)->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}

VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipelineDerivative(VkDevice device,
                                   const VkGraphicsPipelineCreateInfo *pCreateInfo,
                                   VkPipeline basePipeline,
                                   VkPipeline *pPipeline)
{
    bool pass = validate_graphics_pipeline(pCreateInfo);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        return device_dispatch_table(device)->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}

/* hook DestroyDevice to remove tableMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyDevice(VkDevice device)
{
    dispatch_key key = get_dispatch_key(device);
    VkResult res = device_dispatch_table(device)->DestroyDevice(device);
    destroy_device_dispatch_table(key);
    return res;
}

VkResult VKAPI vkCreateInstance(
        const VkInstanceCreateInfo* pCreateInfo,
        VkInstance* pInstance)
{
    loader_platform_thread_once(&g_initOnce, initLayer);
    /*
     * For layers, the pInstance has already been filled out
     * by the loader so that the dispatch table is available.
     */
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(*pInstance);

    VkResult result = pTable->CreateInstance(pCreateInfo, pInstance);

    if (result == VK_SUCCESS) {
        enable_debug_report(pCreateInfo->extensionCount, pCreateInfo->pEnabledExtensions);

        debug_report_init_instance_extension_dispatch_table(
                    pTable,
                    pTable->GetInstanceProcAddr,
                    *pInstance);
    }
    return result;
}

/* hook DestroyInstance to remove tableInstanceMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyInstance(VkInstance instance)
{
    dispatch_key key = get_dispatch_key(instance);
    VkResult res = instance_dispatch_table(instance)->DestroyInstance(instance);
    destroy_instance_dispatch_table(key);
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgCreateMsgCallback(
        VkInstance instance,
        VkFlags msgFlags,
        const PFN_vkDbgMsgCallback pfnMsgCallback,
        void* pUserData,
        VkDbgMsgCallback* pMsgCallback)
{
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
    return layer_create_msg_callback(instance, pTable, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgDestroyMsgCallback(
        VkInstance instance,
        VkDbgMsgCallback msgCallback)
{
    VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
    return layer_destroy_msg_callback(instance, pTable, msgCallback);
}

VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice device, const char* pName)
{
    if (device == NULL)
        return NULL;

    loader_platform_thread_once(&g_initOnce, initLayer);

    /* loader uses this to force layer initialization; device object is wrapped */
    if (!strcmp("vkGetDeviceProcAddr", pName)) {
        initDeviceTable((const VkBaseLayerObject *) device);
        return (void *) vkGetDeviceProcAddr;
    }

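    /* Return our own implementation for the entrypoints this layer intercepts; anything
     * else falls through to the next layer via the dispatch table below. Each hook
     * expands to a prefix match on pName, e.g.:
     *
     *     ADD_HOOK(vkCreateShader);
     *     // -> if (!strncmp("vkCreateShader", pName, sizeof("vkCreateShader"))) return (void *) vkCreateShader;
     */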
#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkDestroyDevice);
    ADD_HOOK(vkCreateGraphicsPipeline);
    ADD_HOOK(vkCreateGraphicsPipelineDerivative);
#undef ADD_HOOK

    VkLayerDispatchTable* pTable = device_dispatch_table(device);
    if (pTable->GetDeviceProcAddr == NULL)
        return NULL;
    return pTable->GetDeviceProcAddr(device, pName);
}

VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
{
    void *fptr;

    if (inst == NULL)
        return NULL;

    loader_platform_thread_once(&g_initOnce, initLayer);

    if (!strcmp("vkGetInstanceProcAddr", pName)) {
        initInstanceTable((const VkBaseLayerObject *) inst);
        return (void *) vkGetInstanceProcAddr;
    }

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, pName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateInstance);
    ADD_HOOK(vkDestroyInstance);
    ADD_HOOK(vkGetGlobalExtensionProperties);
    ADD_HOOK(vkGetGlobalExtensionCount);
    ADD_HOOK(vkGetPhysicalDeviceExtensionProperties);
    ADD_HOOK(vkGetPhysicalDeviceExtensionCount);
#undef ADD_HOOK

    fptr = msg_callback_get_proc_addr(pName);
    if (fptr)
        return fptr;

    VkLayerInstanceDispatchTable* pTable = instance_dispatch_table(inst);
    if (pTable->GetInstanceProcAddr == NULL)
        return NULL;
    return pTable->GetInstanceProcAddr(inst, pName);
}