/*
 * Vulkan
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <map>
#include <unordered_map>
#include <vector>
#include <string>
#include "vk_loader_platform.h"
#include "vk_dispatch_table_helper.h"
#include "vk_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_msg.h"
#include "vk_layer_table.h"
#include "vk_layer_logging.h"
#include "vk_enum_string_helper.h"
#include "shader_checker.h"
// The following is #included again to catch certain OS-specific functions
// being used:
#include "vk_loader_platform.h"
#include "vk_layer_extension_utils.h"

#include "spirv/spirv.h"

typedef struct _layer_data {
    debug_report_data *report_data;
    // TODO: put instance data here
    VkDbgMsgCallback logging_callback;
} layer_data;

static std::unordered_map<void *, layer_data *> layer_data_map;
static device_table_map shader_checker_device_table_map;
static instance_table_map shader_checker_instance_table_map;


template layer_data *get_my_data_ptr<layer_data>(
        void *data_key,
        std::unordered_map<void *, layer_data *> &data_map);

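/* mdd() / mid() return the debug_report_data for the layer_data entry that
 * owns a dispatchable object, looked up by the object's dispatch key
 * (device-level and instance-level objects respectively).
 */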
debug_report_data *mdd(VkObject object)
{
    dispatch_key key = get_dispatch_key(object);
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
#if DISPATCH_MAP_DEBUG
    fprintf(stderr, "MDD: map: %p, object: %p, key: %p, data: %p\n", &layer_data_map, object, key, my_data);
#endif
    return my_data->report_data;
}

debug_report_data *mid(VkInstance object)
{
    dispatch_key key = get_dispatch_key(object);
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
#if DISPATCH_MAP_DEBUG
    fprintf(stderr, "MID: map: %p, object: %p, key: %p, data: %p\n", &layer_data_map, object, key, my_data);
#endif
    return my_data->report_data;
}

static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
// TODO : This can be much smarter, using separate locks for separate global data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;

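/* A SPIR-V module starts with a five-word header; each instruction after that
 * packs its word count into the high 16 bits of its first word and its opcode
 * into the low 16 bits, so the stream can be walked without decoding operands.
 * build_type_def_index() records the offset of each OpType* instruction,
 * keyed by the <id> it defines (the word immediately after the opcode word).
 */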
static void
build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
{
    unsigned int const *code = (unsigned int const *)&words[0];
    size_t size = words.size();

    unsigned word = 5;
    while (word < size) {
        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        switch (opcode) {
        case spv::OpTypeVoid:
        case spv::OpTypeBool:
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeSampler:
        case spv::OpTypeFilter:
        case spv::OpTypeArray:
        case spv::OpTypeRuntimeArray:
        case spv::OpTypeStruct:
        case spv::OpTypeOpaque:
        case spv::OpTypePointer:
        case spv::OpTypeFunction:
        case spv::OpTypeEvent:
        case spv::OpTypeDeviceEvent:
        case spv::OpTypeReserveId:
        case spv::OpTypeQueue:
        case spv::OpTypePipe:
            type_def_index[code[word+1]] = word;
            break;

        default:
            /* We only care about type definitions */
            break;
        }

        word += oplen;
    }
}

struct shader_module {
    /* the spirv image itself */
    std::vector<uint32_t> words;
    /* a mapping of <id> to the first word of its def. this is useful because walking type
     * trees requires jumping all over the instruction stream.
     */
    std::unordered_map<unsigned, unsigned> type_def_index;
    bool is_spirv;

    shader_module(VkDevice dev, VkShaderModuleCreateInfo const *pCreateInfo) :
        words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
        type_def_index(),
        is_spirv(true) {

        if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
            log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
                    "Shader is not SPIR-V, most checks will not be possible");
            is_spirv = false;
            return;
        }

        build_type_def_index(words, type_def_index);
    }
};


static std::unordered_map<void *, shader_module *> shader_module_map;

struct shader_object {
    std::string name;
    struct shader_module *module;

    shader_object(VkShaderCreateInfo const *pCreateInfo)
    {
        module = shader_module_map[pCreateInfo->module];
        name = pCreateInfo->pName;
    }
};
static std::unordered_map<void *, shader_object *> shader_object_map;

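/* Reads the layer's settings (report flags, debug action, optional log file)
 * and, when message logging is enabled, registers a log callback against the
 * instance's debug_report_data. Also lazily creates the global lock that
 * guards the shader and pipeline maps.
 */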
static void
init_shader_checker(layer_data *my_data)
{
    uint32_t report_flags = 0;
    uint32_t debug_action = 0;
    FILE *log_output = NULL;
    const char *option_str;
    // initialize ShaderChecker options
    report_flags = getLayerOptionFlags("ShaderCheckerReportFlags", 0);
    getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &debug_action);

    if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
    {
        option_str = getLayerOption("ShaderCheckerLogFilename");
        if (option_str)
        {
            log_output = fopen(option_str, "w");
        }
        if (log_output == NULL)
            log_output = stdout;

        layer_create_msg_callback(my_data->report_data, report_flags, log_callback, (void *) log_output, &my_data->logging_callback);
    }

    if (!globalLockInitialized)
    {
        // TODO/TBD: Need to delete this mutex sometime. How??? One
        // suggestion is to call this during vkCreateInstance(), and then we
        // can clean it up during vkDestroyInstance(). However, that requires
        // that the layer have per-instance locks. We need to come back and
        // address this soon.
        loader_platform_thread_create_mutex(&globalLock);
        globalLockInitialized = 1;
    }
}

static const VkLayerProperties shader_checker_global_layers[] = {
    {
        "ShaderChecker",
        VK_API_VERSION,
        VK_MAKE_VERSION(0, 1, 0),
        "Validation layer: ShaderChecker",
    }
};

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionProperties(
        const char *pLayerName,
        uint32_t *pCount,
        VkExtensionProperties* pProperties)
{
    /* shader checker does not have any global extensions */
    return util_GetExtensionProperties(0, NULL, pCount, pProperties);
}

VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalLayerProperties(
        uint32_t *pCount,
        VkLayerProperties* pProperties)
{
    return util_GetLayerProperties(ARRAY_SIZE(shader_checker_global_layers),
                                   shader_checker_global_layers,
                                   pCount, pProperties);
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionProperties(
        VkPhysicalDevice physicalDevice,
        const char* pLayerName,
        uint32_t* pCount,
        VkExtensionProperties* pProperties)
{
    /* Shader checker does not have any physical device extensions */
    return util_GetExtensionProperties(0, NULL, pCount, pProperties);
}

VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceLayerProperties(
        VkPhysicalDevice physicalDevice,
        uint32_t* pCount,
        VkLayerProperties* pProperties)
{
    /* Shader checker physical device layers are the same as global */
    return util_GetLayerProperties(ARRAY_SIZE(shader_checker_global_layers),
                                   shader_checker_global_layers,
                                   pCount, pProperties);
}

static char const *
storage_class_name(unsigned sc)
{
    switch (sc) {
    case spv::StorageClassInput: return "input";
    case spv::StorageClassOutput: return "output";
    case spv::StorageClassUniformConstant: return "const uniform";
    case spv::StorageClassUniform: return "uniform";
    case spv::StorageClassWorkgroupLocal: return "workgroup local";
    case spv::StorageClassWorkgroupGlobal: return "workgroup global";
    case spv::StorageClassPrivateGlobal: return "private global";
    case spv::StorageClassFunction: return "function";
    case spv::StorageClassGeneric: return "generic";
    case spv::StorageClassPrivate: return "private";
    case spv::StorageClassAtomicCounter: return "atomic counter";
    default: return "unknown";
    }
}

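/* Renders a human-readable description of a type for diagnostics by recursing
 * through the type_def_index; e.g. a pointer to a vec4 of 32-bit floats in the
 * input interface comes out as "ptr to input vec4 of float32".
 */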
/* returns ptr to null terminator */
static char *
describe_type(char *dst, shader_module const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return dst + sprintf(dst, "undef");
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
    case spv::OpTypeBool:
        return dst + sprintf(dst, "bool");
    case spv::OpTypeInt:
        return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
    case spv::OpTypeFloat:
        return dst + sprintf(dst, "float%d", code[2]);
    case spv::OpTypeVector:
        dst += sprintf(dst, "vec%d of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypeMatrix:
        dst += sprintf(dst, "mat%d of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypeArray:
        dst += sprintf(dst, "arr[%d] of ", code[3]);
        return describe_type(dst, src, code[2]);
    case spv::OpTypePointer:
        dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
        return describe_type(dst, src, code[3]);
    case spv::OpTypeStruct:
        {
            unsigned oplen = code[0] >> 16;
            dst += sprintf(dst, "struct of (");
            for (unsigned i = 2; i < oplen; i++) {
                dst = describe_type(dst, src, code[i]);
                dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
            }
            return dst;
        }
    default:
        return dst + sprintf(dst, "oddtype");
    }
}

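/* Compares two type trees for interface compatibility. b_arrayed is set when
 * the consumer stage (tessellation control or geometry in this layer's table)
 * receives per-vertex inputs wrapped in one extra level of OpTypeArray; in
 * that case the array is peeled off the consumer side before the element
 * types are compared.
 */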
static bool
types_match(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
    /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
    case spv::OpTypeBool:
        return true && !b_arrayed;
    case spv::OpTypeInt:
        /* match on width, signedness */
        return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
    case spv::OpTypeFloat:
        /* match on width */
        return a_code[2] == b_code[2] && !b_arrayed;
    case spv::OpTypeVector:
    case spv::OpTypeMatrix:
    case spv::OpTypeArray:
        /* match on element type, count. these all have the same layout. we don't get here if
         * b_arrayed -- that is handled above. */
        return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
    case spv::OpTypeStruct:
        /* match on all element types */
        {
            if (b_arrayed) {
                /* for the purposes of matching different levels of arrayness, structs are leaves. */
                return false;
            }

            unsigned a_len = a_code[0] >> 16;
            unsigned b_len = b_code[0] >> 16;

            if (a_len != b_len) {
                return false; /* structs cannot match if member counts differ */
            }

            for (unsigned i = 2; i < a_len; i++) {
                if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                    return false;
                }
            }

            return true;
        }
    case spv::OpTypePointer:
        /* match on pointee type. storage class is expected to differ */
        return types_match(a, b, a_code[3], b_code[3], b_arrayed);

    default:
        /* remaining types are CLisms, or may not appear in the interfaces we
         * are interested in. Just claim no match.
         */
        return false;

    }
}


static int
value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
{
    auto it = map.find(id);
    if (it == map.end())
        return def;
    else
        return it->second;
}


struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};

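/* Walks a module's instruction stream, gathering Location and BuiltIn
 * decorations first, then collecting every OpVariable in the requested
 * storage class: variables with a Location go into `out` keyed by location,
 * builtins go into `builtins_out` keyed by the builtin id, and variables
 * decorated with neither are reported as inconsistent SPIR-V.
 */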
static void
collect_interface_by_location(VkDevice dev,
                              shader_module const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC",
                        "var %d (type %d) in %s interface has no Location or Builtin decoration",
                        code[word+2], code[word+1], storage_class_name(sinterface));
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}

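/* The CreateShaderModule / CreateShader hooks call down the chain first, then
 * record the new handle in shader_module_map / shader_object_map under the
 * global lock, so that pipeline validation can later look up the SPIR-V words
 * (and entry point name) for each stage.
 */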
VK_LAYER_EXPORT VkResult VKAPI vkCreateShaderModule(
        VkDevice device,
        const VkShaderModuleCreateInfo *pCreateInfo,
        VkShaderModule *pShaderModule)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkResult res = get_dispatch_table(shader_checker_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pShaderModule);

    shader_module_map[(VkBaseLayerObject *) *pShaderModule] = new shader_module(device, pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(
        VkDevice device,
        const VkShaderCreateInfo *pCreateInfo,
        VkShader *pShader)
{
    loader_platform_thread_lock_mutex(&globalLock);
    VkResult res = get_dispatch_table(shader_checker_device_table_map, device)->CreateShader(device, pCreateInfo, pShader);

    shader_object_map[(VkBaseLayerObject *) *pShader] = new shader_object(pCreateInfo);
    loader_platform_thread_unlock_mutex(&globalLock);
    return res;
}

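/* Checks the boundary between two adjacent stages. Both interfaces are
 * collected as location-sorted maps and merge-walked together: an output with
 * no matching input is only a warning, an input with no matching output is an
 * error, and matching locations are type-checked with types_match(), honoring
 * the consumer's arrayed-input convention.
 */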
static bool
validate_interface_between_stages(VkDevice dev,
                                  shader_module const *producer, char const *producer_name,
                                  shader_module const *consumer, char const *consumer_name,
                                  bool consumer_arrayed_input)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> inputs;

    std::map<uint32_t, interface_var> builtin_outputs;
    std::map<uint32_t, interface_var> builtin_inputs;

    char str[1024];
    bool pass = true;

    collect_interface_by_location(dev, producer, spv::StorageClassOutput, outputs, builtin_outputs);
    collect_interface_by_location(dev, consumer, spv::StorageClassInput, inputs, builtin_inputs);

    auto a_it = outputs.begin();
    auto b_it = inputs.begin();

    /* maps sorted by key (location); walk them together to find mismatches */
    while ((outputs.size() > 0 && a_it != outputs.end()) || (inputs.size() && b_it != inputs.end())) {
        bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
        bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
        auto a_first = a_at_end ? 0 : a_it->first;
        auto b_first = b_at_end ? 0 : b_it->first;

        if (b_at_end || a_first < b_first) {
            log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
                    "%s writes to output location %d which is not consumed by %s", producer_name, a_first, consumer_name);
            a_it++;
        }
        else if (a_at_end || a_first > b_first) {
            log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
                    "%s consumes input location %d which is not written by %s", consumer_name, b_first, producer_name);
            pass = false;
            b_it++;
        }
        else {
            if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
                /* OK! */
            }
            else {
                char producer_type[1024];
                char consumer_type[1024];
                describe_type(producer_type, producer, a_it->second.type_id);
                describe_type(consumer_type, consumer, b_it->second.type_id);

                log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                        "Type mismatch on location %d: '%s' vs '%s'", a_it->first, producer_type, consumer_type);
                pass = false;
            }
            a_it++;
            b_it++;
        }
    }

    return pass;
}


enum FORMAT_TYPE {
    FORMAT_TYPE_UNDEFINED,
    FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
    FORMAT_TYPE_SINT,
    FORMAT_TYPE_UINT,
};

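/* Collapses a VkFormat into one of the coarse FORMAT_TYPE_* buckets so it can
 * be compared against the fundamental type of the shader-side variable:
 * VK_FORMAT_UNDEFINED maps to UNDEFINED, and any format not explicitly listed
 * as SINT or UINT is treated as float-like.
 */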
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}


/* characterizes a SPIR-V type appearing in an interface to a FF stage,
 * for comparison to a VkFormat's characterization above. */
static unsigned
get_fundamental_type(shader_module const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        return FORMAT_TYPE_UNDEFINED;
    }

    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
    case spv::OpTypeInt:
        return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
    case spv::OpTypeFloat:
        return FORMAT_TYPE_FLOAT;
    case spv::OpTypeVector:
        return get_fundamental_type(src, code[2]);
    case spv::OpTypeMatrix:
        return get_fundamental_type(src, code[2]);
    case spv::OpTypeArray:
        return get_fundamental_type(src, code[2]);
    case spv::OpTypePointer:
        return get_fundamental_type(src, code[3]);
    default:
        return FORMAT_TYPE_UNDEFINED;
    }
}


static bool
validate_vi_consistency(VkDevice dev, VkPipelineVertexInputStateCreateInfo const *vi)
{
    /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
     * each binding should be specified only once.
     */
    std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
    bool pass = true;

    for (unsigned i = 0; i < vi->bindingCount; i++) {
        auto desc = &vi->pVertexBindingDescriptions[i];
        auto &binding = bindings[desc->binding];
        if (binding) {
            log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC",
                    "Duplicate vertex input binding descriptions for binding %d", desc->binding);
            pass = false;
        }
        else {
            binding = desc;
        }
    }

    return pass;
}


static bool
validate_vi_against_vs_inputs(VkDevice dev, VkPipelineVertexInputStateCreateInfo const *vi, shader_module const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    bool pass = true;

    collect_interface_by_location(dev, vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
                    "Vertex attribute at location %d not consumed by VS", a_first);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
                    "VS consumes input at location %d but not provided", b_first);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                        "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}


static bool
validate_fs_outputs_against_cb(VkDevice dev, shader_module const *fs, VkPipelineCbStateCreateInfo const *cb)
{
    std::map<uint32_t, interface_var> outputs;
    std::map<uint32_t, interface_var> builtin_outputs;
    bool pass = true;

    /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */

    collect_interface_by_location(dev, fs, spv::StorageClassOutput, outputs, builtin_outputs);

    /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
     * and all color attachment should be UNORM/SNORM/FLOAT.
     */
    if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
        if (outputs.size()) {
            log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
                    "Should not have user-defined FS outputs when using broadcast");
            pass = false;
        }

        for (unsigned i = 0; i < cb->attachmentCount; i++) {
            unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
            if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
                log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                        "CB format should not be SINT or UINT when using broadcast");
                pass = false;
            }
        }

        return pass;
    }

    auto it = outputs.begin();
    uint32_t attachment = 0;

    /* Walk attachment list and outputs together -- this is a little overpowered since attachments
     * are currently dense, but the parallel with matching between shader stages is nice.
     */

    while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
        if (attachment == cb->attachmentCount || (it != outputs.end() && it->first < attachment)) {
            log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
                    "FS writes to output location %d with no matching attachment", it->first);
            it++;
        }
        else if (it == outputs.end() || it->first > attachment) {
            log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
                    "Attachment %d not written by FS", attachment);
            attachment++;
            pass = false;
        }
        else {
            unsigned output_type = get_fundamental_type(fs, it->second.type_id);
            unsigned att_type = get_format_type(cb->pAttachments[attachment].format);

            /* type checking */
            if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
                char fs_type[1024];
                describe_type(fs_type, fs, it->second.type_id);
                log_msg(mdd(dev), VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
                        "Attachment %d of type `%s` does not match FS output type of `%s`",
                        attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
                pass = false;
            }

            /* OK! */
            it++;
            attachment++;
        }
    }

    return pass;
}

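/* Per-stage metadata indexed by VK_SHADER_STAGE_* value: a display name for
 * messages, plus whether the stage's per-vertex inputs carry an extra array
 * dimension (tessellation control and geometry in this table), which feeds
 * the b_arrayed handling in types_match().
 */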
struct shader_stage_attributes {
    char const * const name;
    bool arrayed_input;
};


static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },
    { "tessellation evaluation shader", false },
    { "geometry shader", true },
    { "fragment shader", false },
};

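/* Top-level pipeline check: collects the shader module for each stage present
 * in pCreateInfo (stages may appear in any order), validates the vertex input
 * state for duplicate bindings, matches VI attributes against the VS inputs,
 * walks each producer/consumer pair of adjacent present stages, and finally
 * matches FS outputs against the color blend attachments.
 */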
//TODO handle count > 1
static bool
validate_graphics_pipeline(VkDevice dev, uint32_t count, VkGraphicsPipelineCreateInfo const *pCreateInfo)
{
    /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
     * before trying to do anything more: */

    shader_module const *shaders[VK_SHADER_STAGE_FRAGMENT + 1];  /* exclude CS */
    memset(shaders, 0, sizeof(shaders));
    VkPipelineCbStateCreateInfo const *cb = 0;
    VkPipelineVertexInputStateCreateInfo const *vi = 0;
    bool pass = true;

    loader_platform_thread_lock_mutex(&globalLock);

    for (auto i = 0; i < pCreateInfo->stageCount; i++) {
        VkPipelineShaderStageCreateInfo const *pStage = &pCreateInfo->pStages[i];
        if (pStage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {

            if (pStage->stage < VK_SHADER_STAGE_VERTEX || pStage->stage > VK_SHADER_STAGE_FRAGMENT) {
                log_msg(mdd(dev), VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC",
                        "Unknown shader stage %d", pStage->stage);
            }
            else {
                struct shader_object *shader = shader_object_map[(void *) pStage->shader];
                shaders[pStage->stage] = shader->module;
            }
        }
    }

    cb = pCreateInfo->pCbState;
    vi = pCreateInfo->pVertexInputState;

    if (vi) {
        pass = validate_vi_consistency(dev, vi) && pass;
    }

    if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
        pass = validate_vi_against_vs_inputs(dev, vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
    }

    /* TODO: enforce rules about present combinations of shaders */
    int producer = VK_SHADER_STAGE_VERTEX;
    int consumer = VK_SHADER_STAGE_GEOMETRY;

    while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
        producer++;
        consumer++;
    }

    for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
        assert(shaders[producer]);
        if (shaders[consumer]) {
            if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
                pass = validate_interface_between_stages(dev,
                        shaders[producer], shader_stage_attribs[producer].name,
                        shaders[consumer], shader_stage_attribs[consumer].name,
                        shader_stage_attribs[consumer].arrayed_input) && pass;
            }

            producer = consumer;
        }
    }

    if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
        pass = validate_fs_outputs_against_cb(dev, shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
    }

    loader_platform_thread_unlock_mutex(&globalLock);
    return pass;
}

//TODO handle pipelineCache entry points
VK_LAYER_EXPORT VkResult VKAPI
vkCreateGraphicsPipelines(VkDevice device,
                          VkPipelineCache pipelineCache,
                          uint32_t count,
                          const VkGraphicsPipelineCreateInfo *pCreateInfos,
                          VkPipeline *pPipelines)
{
    bool pass = validate_graphics_pipeline(device, count, pCreateInfos);

    if (pass) {
        /* The driver is allowed to crash if passed junk. Only actually create the
         * pipeline if we didn't run into any showstoppers above.
         */
        return get_dispatch_table(shader_checker_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pPipelines);
    }
    else {
        return VK_ERROR_UNKNOWN;
    }
}


VK_LAYER_EXPORT VkResult VKAPI vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, VkDevice* pDevice)
{
    VkLayerDispatchTable *pDeviceTable = get_dispatch_table(shader_checker_device_table_map, *pDevice);
    VkResult result = pDeviceTable->CreateDevice(gpu, pCreateInfo, pDevice);
    if (result == VK_SUCCESS) {
        layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
        VkLayerDispatchTable *pTable = get_dispatch_table(shader_checker_device_table_map, *pDevice);
        layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
        my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
    }
    return result;
}

/* hook DestroyDevice to remove tableMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyDevice(VkDevice device)
{
    dispatch_key key = get_dispatch_key(device);
    VkLayerDispatchTable *pDisp = get_dispatch_table(shader_checker_device_table_map, device);
    VkResult result = pDisp->DestroyDevice(device);
    shader_checker_device_table_map.erase(key);
    return result;
}

VkResult VKAPI vkCreateInstance(
        const VkInstanceCreateInfo* pCreateInfo,
        VkInstance* pInstance)
{
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(shader_checker_instance_table_map, *pInstance);
    VkResult result = pTable->CreateInstance(pCreateInfo, pInstance);

    if (result == VK_SUCCESS) {
        layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
        my_data->report_data = debug_report_create_instance(
                pTable,
                *pInstance,
                pCreateInfo->extensionCount,
                pCreateInfo->ppEnabledExtensionNames);

        init_shader_checker(my_data);
    }
    return result;
}

/* hook DestroyInstance to remove tableInstanceMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyInstance(VkInstance instance)
{
    dispatch_key key = get_dispatch_key(instance);
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(shader_checker_instance_table_map, instance);
    VkResult res = pTable->DestroyInstance(instance);

    // Clean up logging callback, if any
    layer_data *my_data = get_my_data_ptr(key, layer_data_map);
    if (my_data->logging_callback) {
        layer_destroy_msg_callback(my_data->report_data, my_data->logging_callback);
    }

    layer_debug_report_destroy_instance(my_data->report_data);
    layer_data_map.erase(key);

    shader_checker_instance_table_map.erase(key);
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgCreateMsgCallback(
        VkInstance instance,
        VkFlags msgFlags,
        const PFN_vkDbgMsgCallback pfnMsgCallback,
        void* pUserData,
        VkDbgMsgCallback* pMsgCallback)
{
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(shader_checker_instance_table_map, instance);
    VkResult res = pTable->DbgCreateMsgCallback(instance, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
    if (VK_SUCCESS == res) {
        layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
        res = layer_create_msg_callback(my_data->report_data, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
    }
    return res;
}

VK_LAYER_EXPORT VkResult VKAPI vkDbgDestroyMsgCallback(
        VkInstance instance,
        VkDbgMsgCallback msgCallback)
{
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(shader_checker_instance_table_map, instance);
    VkResult res = pTable->DbgDestroyMsgCallback(instance, msgCallback);
    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    layer_destroy_msg_callback(my_data->report_data, msgCallback);
    return res;
}

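/* GetDeviceProcAddr / GetInstanceProcAddr: the loader's initial query for
 * vkGet*ProcAddr also initializes this layer's dispatch table for the object.
 * Entry points intercepted by this layer (the ADD_HOOK lists below) are
 * returned directly; the instance version additionally consults the
 * debug-report helpers; anything else falls through to the next layer's
 * dispatch table.
 */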
VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice dev, const char* funcName)
{
    if (dev == NULL)
        return NULL;

    /* loader uses this to force layer initialization; device object is wrapped */
    if (!strcmp("vkGetDeviceProcAddr", funcName)) {
        initDeviceTable(shader_checker_device_table_map, (const VkBaseLayerObject *) dev);
        return (void *) vkGetDeviceProcAddr;
    }

#define ADD_HOOK(fn) \
    if (!strncmp(#fn, funcName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateDevice);
    ADD_HOOK(vkCreateShaderModule);
    ADD_HOOK(vkCreateShader);
    ADD_HOOK(vkDestroyDevice);
    ADD_HOOK(vkCreateGraphicsPipelines);
#undef ADD_HOOK

    VkLayerDispatchTable* pTable = get_dispatch_table(shader_checker_device_table_map, dev);
    {
        if (pTable->GetDeviceProcAddr == NULL)
            return NULL;
        return pTable->GetDeviceProcAddr(dev, funcName);
    }
}

VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
{
    void *fptr;

    if (instance == NULL)
        return NULL;

    if (!strcmp("vkGetInstanceProcAddr", funcName)) {
        initInstanceTable(shader_checker_instance_table_map, (const VkBaseLayerObject *) instance);
        return (void *) vkGetInstanceProcAddr;
    }
#define ADD_HOOK(fn) \
    if (!strncmp(#fn, funcName, sizeof(#fn))) \
        return (void *) fn

    ADD_HOOK(vkCreateInstance);
    ADD_HOOK(vkDestroyInstance);
    ADD_HOOK(vkGetGlobalExtensionProperties);
    ADD_HOOK(vkGetPhysicalDeviceExtensionProperties);
    ADD_HOOK(vkGetGlobalLayerProperties);
    ADD_HOOK(vkGetPhysicalDeviceLayerProperties);
#undef ADD_HOOK

    layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    fptr = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
    if (fptr)
        return fptr;

    {
        VkLayerInstanceDispatchTable* pTable = get_dispatch_table(shader_checker_instance_table_map, instance);
        if (pTable->GetInstanceProcAddr == NULL)
            return NULL;
        return pTable->GetInstanceProcAddr(instance, funcName);
    }
}