blob: e0ab544fdfa58cf0dee925ffa64bc895ad7d6418 [file] [log] [blame]
Chris Forbesaab9d112015-04-02 13:22:31 +13001/*
2 * Vulkan
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24#include <string.h>
25#include <stdlib.h>
26#include <assert.h>
Chris Forbes67cc36f2015-04-13 12:14:52 +120027#include <map>
Chris Forbesaab9d112015-04-02 13:22:31 +130028#include <unordered_map>
Chris Forbesbb164b62015-04-08 10:19:16 +120029#include <map>
Chris Forbes4396ff52015-04-08 10:11:59 +120030#include <vector>
Chris Forbesaab9d112015-04-02 13:22:31 +130031#include "loader_platform.h"
32#include "vk_dispatch_table_helper.h"
33#include "vkLayer.h"
Chris Forbes1b466bd2015-04-15 06:59:41 +120034#include "layers_config.h"
35#include "layers_msg.h"
Jon Ashburn5a10d212015-06-01 10:02:09 -060036#include "layers_table.h"
Chris Forbes3317b382015-05-04 14:04:24 +120037#include "vk_enum_string_helper.h"
Chris Forbes5c75afe2015-04-17 10:13:28 +120038#include "shader_checker.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130039// The following is #included again to catch certain OS-specific functions
40// being used:
41#include "loader_platform.h"
42
Chris Forbes32e3b462015-05-09 10:31:21 +120043#include "spirv/spirv.h"
Chris Forbesaab9d112015-04-02 13:22:31 +130044
Chris Forbesaab9d112015-04-02 13:22:31 +130045
Chris Forbes1b466bd2015-04-15 06:59:41 +120046static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
Chris Forbes1ed0f982015-05-29 14:55:18 +120047// TODO : This can be much smarter, using separate locks for separate global data
48static int globalLockInitialized = 0;
49static loader_platform_thread_mutex globalLock;
Chris Forbes4396ff52015-04-08 10:11:59 +120050
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120051
52static void
53build_type_def_index(std::vector<unsigned> const &words, std::unordered_map<unsigned, unsigned> &type_def_index)
54{
55 unsigned int const *code = (unsigned int const *)&words[0];
56 size_t size = words.size();
57
58 unsigned word = 5;
59 while (word < size) {
60 unsigned opcode = code[word] & 0x0ffffu;
61 unsigned oplen = (code[word] & 0xffff0000u) >> 16;
62
63 switch (opcode) {
64 case spv::OpTypeVoid:
65 case spv::OpTypeBool:
66 case spv::OpTypeInt:
67 case spv::OpTypeFloat:
68 case spv::OpTypeVector:
69 case spv::OpTypeMatrix:
70 case spv::OpTypeSampler:
71 case spv::OpTypeFilter:
72 case spv::OpTypeArray:
73 case spv::OpTypeRuntimeArray:
74 case spv::OpTypeStruct:
75 case spv::OpTypeOpaque:
76 case spv::OpTypePointer:
77 case spv::OpTypeFunction:
78 case spv::OpTypeEvent:
79 case spv::OpTypeDeviceEvent:
80 case spv::OpTypeReserveId:
81 case spv::OpTypeQueue:
82 case spv::OpTypePipe:
83 type_def_index[code[word+1]] = word;
84 break;
85
86 default:
87 /* We only care about type definitions */
88 break;
89 }
90
91 word += oplen;
92 }
93}
94
Chris Forbes4396ff52015-04-08 10:11:59 +120095struct shader_source {
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120096 /* the spirv image itself */
Chris Forbes4396ff52015-04-08 10:11:59 +120097 std::vector<uint32_t> words;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +120098 /* a mapping of <id> to the first word of its def. this is useful because walking type
99 * trees requires jumping all over the instruction stream.
100 */
101 std::unordered_map<unsigned, unsigned> type_def_index;
Chris Forbes4453c772015-06-05 15:01:08 +1200102 bool is_spirv;
Chris Forbes4396ff52015-04-08 10:11:59 +1200103
104 shader_source(VkShaderCreateInfo const *pCreateInfo) :
Chris Forbes4453c772015-06-05 15:01:08 +1200105 words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
106 type_def_index(),
107 is_spirv(true) {
108
109 if (words.size() < 5 || words[0] != spv::MagicNumber || words[1] != spv::Version) {
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600110 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
Chris Forbes4453c772015-06-05 15:01:08 +1200111 "Shader is not SPIR-V, most checks will not be possible");
112 is_spirv = false;
113 return;
114 }
115
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200116
117 build_type_def_index(words, type_def_index);
Chris Forbes4396ff52015-04-08 10:11:59 +1200118 }
119};
120
121
122static std::unordered_map<void *, shader_source *> shader_map;
123
124
Chris Forbes1b466bd2015-04-15 06:59:41 +1200125static void
126initLayer()
127{
128 const char *strOpt;
129 // initialize ShaderChecker options
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600130 getLayerOptionEnum("ShaderCheckerReportLevel", (uint32_t *) &g_reportFlags);
Chris Forbes1b466bd2015-04-15 06:59:41 +1200131 g_actionIsDefault = getLayerOptionEnum("ShaderCheckerDebugAction", (uint32_t *) &g_debugAction);
132
133 if (g_debugAction & VK_DBG_LAYER_ACTION_LOG_MSG)
134 {
135 strOpt = getLayerOption("ShaderCheckerLogFilename");
136 if (strOpt)
137 {
138 g_logFile = fopen(strOpt, "w");
139 }
140 if (g_logFile == NULL)
141 g_logFile = stdout;
142 }
143}
144
Tobin Ehlis432a9ba2015-04-17 08:55:13 -0600145#define SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE 2
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600146static const VkExtensionProperties shaderCheckerExts[SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE] = {
147 {
148 VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
149 "ShaderChecker",
150 0x10,
151 "Sample layer: ShaderChecker",
Jon Ashburnade3bee2015-06-10 16:43:31 -0600152 },
153 {
154 VK_STRUCTURE_TYPE_EXTENSION_PROPERTIES,
155 "Validation",
156 0x10,
157 "Sample layer: ShaderChecker",
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600158 }
Chris Forbesaab9d112015-04-02 13:22:31 +1300159};
160
Chris Forbesaab9d112015-04-02 13:22:31 +1300161VK_LAYER_EXPORT VkResult VKAPI vkGetGlobalExtensionInfo(
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600162 VkExtensionInfoType infoType,
163 uint32_t extensionIndex,
164 size_t* pDataSize,
165 void* pData)
Chris Forbesaab9d112015-04-02 13:22:31 +1300166{
Chris Forbesaab9d112015-04-02 13:22:31 +1300167 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
Chris Forbesaab9d112015-04-02 13:22:31 +1300168 uint32_t *count;
169
170 if (pDataSize == NULL)
171 return VK_ERROR_INVALID_POINTER;
172
173 switch (infoType) {
174 case VK_EXTENSION_INFO_TYPE_COUNT:
175 *pDataSize = sizeof(uint32_t);
176 if (pData == NULL)
177 return VK_SUCCESS;
178 count = (uint32_t *) pData;
179 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
180 break;
181 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
182 *pDataSize = sizeof(VkExtensionProperties);
183 if (pData == NULL)
184 return VK_SUCCESS;
185 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
186 return VK_ERROR_INVALID_VALUE;
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600187 memcpy((VkExtensionProperties *) pData, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));
Chris Forbesaab9d112015-04-02 13:22:31 +1300188 break;
189 default:
190 return VK_ERROR_INVALID_VALUE;
191 };
192
193 return VK_SUCCESS;
194}
195
Jon Ashburnade3bee2015-06-10 16:43:31 -0600196VK_LAYER_EXPORT VkResult VKAPI vkGetPhysicalDeviceExtensionInfo(
197 VkPhysicalDevice gpu,
198 VkExtensionInfoType infoType,
199 uint32_t extensionIndex,
200 size_t* pDataSize,
201 void* pData)
202{
203 /* This entrypoint is NOT going to init it's own dispatch table since loader calls here early */
204 uint32_t *count;
205
206 if (pDataSize == NULL)
207 return VK_ERROR_INVALID_POINTER;
208
209 switch (infoType) {
210 case VK_EXTENSION_INFO_TYPE_COUNT:
211 *pDataSize = sizeof(uint32_t);
212 if (pData == NULL)
213 return VK_SUCCESS;
214 count = (uint32_t *) pData;
215 *count = SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE;
216 break;
217 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
218 *pDataSize = sizeof(VkExtensionProperties);
219 if (pData == NULL)
220 return VK_SUCCESS;
221 if (extensionIndex >= SHADER_CHECKER_LAYER_EXT_ARRAY_SIZE)
222 return VK_ERROR_INVALID_VALUE;
223 memcpy((VkExtensionProperties *) pData, &shaderCheckerExts[extensionIndex], sizeof(VkExtensionProperties));
224 break;
225 default:
226 return VK_ERROR_INVALID_VALUE;
227 };
228
229 return VK_SUCCESS;
230}
Chris Forbesaab9d112015-04-02 13:22:31 +1300231
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200232static char const *
233storage_class_name(unsigned sc)
234{
235 switch (sc) {
Cody Northrop812b4612015-04-20 14:09:40 -0600236 case spv::StorageClassInput: return "input";
237 case spv::StorageClassOutput: return "output";
238 case spv::StorageClassUniformConstant: return "const uniform";
239 case spv::StorageClassUniform: return "uniform";
240 case spv::StorageClassWorkgroupLocal: return "workgroup local";
241 case spv::StorageClassWorkgroupGlobal: return "workgroup global";
242 case spv::StorageClassPrivateGlobal: return "private global";
243 case spv::StorageClassFunction: return "function";
244 case spv::StorageClassGeneric: return "generic";
245 case spv::StorageClassPrivate: return "private";
246 case spv::StorageClassAtomicCounter: return "atomic counter";
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200247 default: return "unknown";
248 }
249}
250
251
/* Render a human-readable description of SPIR-V type <id> `type` into `dst`,
 * recursing through vector/matrix/array/pointer/struct element types.
 * returns ptr to null terminator */
static char *
describe_type(char *dst, shader_source const *src, unsigned type)
{
    auto type_def_it = src->type_def_index.find(type);

    if (type_def_it == src->type_def_index.end()) {
        /* <id> has no known type definition in this module */
        return dst + sprintf(dst, "undef");
    }

    /* code points at the OpType* instruction defining `type` */
    unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
    unsigned opcode = code[0] & 0x0ffffu;
    switch (opcode) {
        case spv::OpTypeBool:
            return dst + sprintf(dst, "bool");
        case spv::OpTypeInt:
            /* operand 3 is the signedness flag, operand 2 the bit width */
            return dst + sprintf(dst, "%cint%d", code[3] ? 's' : 'u', code[2]);
        case spv::OpTypeFloat:
            return dst + sprintf(dst, "float%d", code[2]);
        case spv::OpTypeVector:
            dst += sprintf(dst, "vec%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeMatrix:
            dst += sprintf(dst, "mat%d of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypeArray:
            dst += sprintf(dst, "arr[%d] of ", code[3]);
            return describe_type(dst, src, code[2]);
        case spv::OpTypePointer:
            dst += sprintf(dst, "ptr to %s ", storage_class_name(code[2]));
            return describe_type(dst, src, code[3]);
        case spv::OpTypeStruct:
        {
            /* member type <id>s are operands 2..wordcount-1 */
            unsigned oplen = code[0] >> 16;
            dst += sprintf(dst, "struct of (");
            for (unsigned i = 2; i < oplen; i++) {
                dst = describe_type(dst, src, code[i]);
                dst += sprintf(dst, i == oplen-1 ? ")" : ", ");
            }
            return dst;
        }
        default:
            return dst + sprintf(dst, "oddtype");
    }
}
297
298
/* Structurally compare type <id> `a_type` from module `a` with `b_type` from
 * module `b`. When `b_arrayed` is set, `b` is allowed (expected) to carry one
 * extra level of OpTypeArray around the otherwise-matching type (e.g. a
 * geometry-stage input that arrays the previous stage's output).
 * Returns true when the types are considered equivalent. */
static bool
types_match(shader_source const *a, shader_source const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
{
    auto a_type_def_it = a->type_def_index.find(a_type);
    auto b_type_def_it = b->type_def_index.find(b_type);

    if (a_type_def_it == a->type_def_index.end()) {
        return false;
    }

    if (b_type_def_it == b->type_def_index.end()) {
        return false;
    }

    /* walk two type trees together, and complain about differences */
    unsigned int const *a_code = (unsigned int const *)&a->words[a_type_def_it->second];
    unsigned int const *b_code = (unsigned int const *)&b->words[b_type_def_it->second];

    unsigned a_opcode = a_code[0] & 0x0ffffu;
    unsigned b_opcode = b_code[0] & 0x0ffffu;

    if (b_arrayed && b_opcode == spv::OpTypeArray) {
        /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
        return types_match(a, b, a_type, b_code[2], false);
    }

    if (a_opcode != b_opcode) {
        return false;
    }

    switch (a_opcode) {
        /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
        case spv::OpTypeBool:
            return true && !b_arrayed;
        case spv::OpTypeInt:
            /* match on width, signedness */
            return a_code[2] == b_code[2] && a_code[3] == b_code[3] && !b_arrayed;
        case spv::OpTypeFloat:
            /* match on width */
            return a_code[2] == b_code[2] && !b_arrayed;
        case spv::OpTypeVector:
        case spv::OpTypeMatrix:
        case spv::OpTypeArray:
            /* match on element type, count. these all have the same layout. we don't get here if
             * b_arrayed -- that is handled above. */
            return !b_arrayed && types_match(a, b, a_code[2], b_code[2], b_arrayed) && a_code[3] == b_code[3];
        case spv::OpTypeStruct:
            /* match on all element types */
            {
                if (b_arrayed) {
                    /* for the purposes of matching different levels of arrayness, structs are leaves. */
                    return false;
                }

                unsigned a_len = a_code[0] >> 16;
                unsigned b_len = b_code[0] >> 16;

                if (a_len != b_len) {
                    return false; /* structs cannot match if member counts differ */
                }

                /* member type <id>s start at operand 2 */
                for (unsigned i = 2; i < a_len; i++) {
                    if (!types_match(a, b, a_code[i], b_code[i], b_arrayed)) {
                        return false;
                    }
                }

                return true;
            }
        case spv::OpTypePointer:
            /* match on pointee type. storage class is expected to differ */
            return types_match(a, b, a_code[3], b_code[3], b_arrayed);

        default:
            /* remaining types are CLisms, or may not appear in the interfaces we
             * are interested in. Just claim no match.
             */
            return false;

    }
}
380
381
Chris Forbes67cc36f2015-04-13 12:14:52 +1200382static int
383value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
384{
385 auto it = map.find(id);
386 if (it == map.end())
387 return def;
388 else
389 return it->second;
390}
391
392
/* One variable appearing in a shader stage interface: the variable's <id>
 * and the <id> of its (pointer) type. */
struct interface_var {
    uint32_t id;
    uint32_t type_id;
    /* TODO: collect the name, too? Isn't required to be present. */
};
398
399
/* Walk the module in `src` and collect every OpVariable in storage class
 * `sinterface` (Input or Output in practice). Variables with a Location
 * decoration go into `out` keyed by location; variables with a BuiltIn
 * decoration go into `builtins_out` keyed by builtin id. Variables with
 * neither draw a warning, since interface matching is undefined for them. */
static void
collect_interface_by_location(shader_source const *src, spv::StorageClass sinterface,
                              std::map<uint32_t, interface_var> &out,
                              std::map<uint32_t, interface_var> &builtins_out)
{
    unsigned int const *code = (unsigned int const *)&src->words[0];
    size_t size = src->words.size();

    /* first pass bookkeeping: target <id> -> decoration operand */
    std::unordered_map<unsigned, unsigned> var_locations;
    std::unordered_map<unsigned, unsigned> var_builtins;

    /* instructions start after the 5-word SPIR-V header */
    unsigned word = 5;
    while (word < size) {

        unsigned opcode = code[word] & 0x0ffffu;
        unsigned oplen = (code[word] & 0xffff0000u) >> 16;

        /* We consider two interface models: SSO rendezvous-by-location, and
         * builtins. Complain about anything that fits neither model.
         */
        if (opcode == spv::OpDecorate) {
            if (code[word+2] == spv::DecorationLocation) {
                var_locations[code[word+1]] = code[word+3];
            }

            if (code[word+2] == spv::DecorationBuiltIn) {
                var_builtins[code[word+1]] = code[word+3];
            }
        }

        /* TODO: handle grouped decorations */
        /* TODO: handle index=1 dual source outputs from FS -- two vars will
         * have the same location, and we DONT want to clobber. */

        /* OpVariable operands: [1]=result type, [2]=result <id>, [3]=storage class */
        if (opcode == spv::OpVariable && code[word+3] == sinterface) {
            int location = value_or_default(var_locations, code[word+2], -1);
            int builtin = value_or_default(var_builtins, code[word+2], -1);

            if (location == -1 && builtin == -1) {
                /* No location defined, and not bound to an API builtin.
                 * The spec says nothing about how this case works (or doesn't)
                 * for interface matching.
                 */
                char str[1024];
                sprintf(str, "var %d (type %d) in %s interface has no Location or Builtin decoration\n",
                        code[word+2], code[word+1], storage_class_name(sinterface));
                layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC", str);
            }
            else if (location != -1) {
                /* A user-defined interface variable, with a location. */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                out[location] = v;
            }
            else {
                /* A builtin interface variable */
                interface_var v;
                v.id = code[word+2];
                v.type_id = code[word+1];
                builtins_out[builtin] = v;
            }
        }

        word += oplen;
    }
}
467
468
Chris Forbesaab9d112015-04-02 13:22:31 +1300469VK_LAYER_EXPORT VkResult VKAPI vkCreateShader(VkDevice device, const VkShaderCreateInfo *pCreateInfo,
470 VkShader *pShader)
471{
Chris Forbes1ed0f982015-05-29 14:55:18 +1200472 loader_platform_thread_lock_mutex(&globalLock);
Jon Ashburn5a10d212015-06-01 10:02:09 -0600473 VkResult res = device_dispatch_table(device)->CreateShader(device, pCreateInfo, pShader);
Chris Forbes4396ff52015-04-08 10:11:59 +1200474
475 shader_map[(VkBaseLayerObject *) *pShader] = new shader_source(pCreateInfo);
Chris Forbes1ed0f982015-05-29 14:55:18 +1200476 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbesaab9d112015-04-02 13:22:31 +1300477 return res;
478}
479
480
Chris Forbes5f362d02015-05-25 11:13:22 +1200481static bool
Chris Forbesbb164b62015-04-08 10:19:16 +1200482validate_interface_between_stages(shader_source const *producer, char const *producer_name,
Chris Forbes4453c772015-06-05 15:01:08 +1200483 shader_source const *consumer, char const *consumer_name,
484 bool consumer_arrayed_input)
Chris Forbesbb164b62015-04-08 10:19:16 +1200485{
486 std::map<uint32_t, interface_var> outputs;
487 std::map<uint32_t, interface_var> inputs;
488
489 std::map<uint32_t, interface_var> builtin_outputs;
490 std::map<uint32_t, interface_var> builtin_inputs;
491
Chris Forbes5c75afe2015-04-17 10:13:28 +1200492 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200493 bool pass = true;
Chris Forbesbb164b62015-04-08 10:19:16 +1200494
Cody Northrop812b4612015-04-20 14:09:40 -0600495 collect_interface_by_location(producer, spv::StorageClassOutput, outputs, builtin_outputs);
496 collect_interface_by_location(consumer, spv::StorageClassInput, inputs, builtin_inputs);
Chris Forbesbb164b62015-04-08 10:19:16 +1200497
498 auto a_it = outputs.begin();
499 auto b_it = inputs.begin();
500
501 /* maps sorted by key (location); walk them together to find mismatches */
David Pinedof5997ab2015-04-27 16:36:17 -0600502 while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
503 bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
504 bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
Chris Forbes4cb97672015-06-10 08:37:27 +1200505 auto a_first = a_at_end ? 0 : a_it->first;
506 auto b_first = b_at_end ? 0 : b_it->first;
David Pinedof5997ab2015-04-27 16:36:17 -0600507
508 if (b_at_end || a_first < b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200509 sprintf(str, "%s writes to output location %d which is not consumed by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600510 producer_name, a_first, consumer_name);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600511 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbesbb164b62015-04-08 10:19:16 +1200512 a_it++;
513 }
David Pinedof5997ab2015-04-27 16:36:17 -0600514 else if (a_at_end || a_first > b_first) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200515 sprintf(str, "%s consumes input location %d which is not written by %s\n",
David Pinedof5997ab2015-04-27 16:36:17 -0600516 consumer_name, b_first, producer_name);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600517 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200518 pass = false;
Chris Forbesbb164b62015-04-08 10:19:16 +1200519 b_it++;
520 }
521 else {
Chris Forbes4453c772015-06-05 15:01:08 +1200522 if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200523 /* OK! */
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200524 }
525 else {
526 char producer_type[1024];
527 char consumer_type[1024];
528 describe_type(producer_type, producer, a_it->second.type_id);
529 describe_type(consumer_type, consumer, b_it->second.type_id);
530
Chris Forbes5c75afe2015-04-17 10:13:28 +1200531 sprintf(str, "Type mismatch on location %d: '%s' vs '%s'\n", a_it->first,
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200532 producer_type, consumer_type);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600533 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200534 pass = false;
Chris Forbes1bb5a2e2015-04-10 11:41:20 +1200535 }
Chris Forbesbb164b62015-04-08 10:19:16 +1200536 a_it++;
537 b_it++;
538 }
539 }
Chris Forbes5f362d02015-05-25 11:13:22 +1200540
541 return pass;
Chris Forbesbb164b62015-04-08 10:19:16 +1200542}
543
544
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200545enum FORMAT_TYPE {
546 FORMAT_TYPE_UNDEFINED,
547 FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
548 FORMAT_TYPE_SINT,
549 FORMAT_TYPE_UINT,
550};
551
552
/* Classify a VkFormat into one of the FORMAT_TYPE_* buckets. Anything not
 * explicitly UNDEFINED, SINT or UINT is treated as float-like (see the
 * FORMAT_TYPE_FLOAT comment above). */
static unsigned
get_format_type(VkFormat fmt) {
    switch (fmt) {
    case VK_FORMAT_UNDEFINED:
        return FORMAT_TYPE_UNDEFINED;
    case VK_FORMAT_R8_SINT:
    case VK_FORMAT_R8G8_SINT:
    case VK_FORMAT_R8G8B8_SINT:
    case VK_FORMAT_R8G8B8A8_SINT:
    case VK_FORMAT_R16_SINT:
    case VK_FORMAT_R16G16_SINT:
    case VK_FORMAT_R16G16B16_SINT:
    case VK_FORMAT_R16G16B16A16_SINT:
    case VK_FORMAT_R32_SINT:
    case VK_FORMAT_R32G32_SINT:
    case VK_FORMAT_R32G32B32_SINT:
    case VK_FORMAT_R32G32B32A32_SINT:
    case VK_FORMAT_B8G8R8_SINT:
    case VK_FORMAT_B8G8R8A8_SINT:
    case VK_FORMAT_R10G10B10A2_SINT:
    case VK_FORMAT_B10G10R10A2_SINT:
        return FORMAT_TYPE_SINT;
    case VK_FORMAT_R8_UINT:
    case VK_FORMAT_R8G8_UINT:
    case VK_FORMAT_R8G8B8_UINT:
    case VK_FORMAT_R8G8B8A8_UINT:
    case VK_FORMAT_R16_UINT:
    case VK_FORMAT_R16G16_UINT:
    case VK_FORMAT_R16G16B16_UINT:
    case VK_FORMAT_R16G16B16A16_UINT:
    case VK_FORMAT_R32_UINT:
    case VK_FORMAT_R32G32_UINT:
    case VK_FORMAT_R32G32B32_UINT:
    case VK_FORMAT_R32G32B32A32_UINT:
    case VK_FORMAT_B8G8R8_UINT:
    case VK_FORMAT_B8G8R8A8_UINT:
    case VK_FORMAT_R10G10B10A2_UINT:
    case VK_FORMAT_B10G10R10A2_UINT:
        return FORMAT_TYPE_UINT;
    default:
        return FORMAT_TYPE_FLOAT;
    }
}
596
597
Chris Forbes28c50882015-05-04 14:04:06 +1200598/* characterizes a SPIR-V type appearing in an interface to a FF stage,
599 * for comparison to a VkFormat's characterization above. */
600static unsigned
601get_fundamental_type(shader_source const *src, unsigned type)
602{
603 auto type_def_it = src->type_def_index.find(type);
604
605 if (type_def_it == src->type_def_index.end()) {
606 return FORMAT_TYPE_UNDEFINED;
607 }
608
609 unsigned int const *code = (unsigned int const *)&src->words[type_def_it->second];
610 unsigned opcode = code[0] & 0x0ffffu;
611 switch (opcode) {
612 case spv::OpTypeInt:
613 return code[3] ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
614 case spv::OpTypeFloat:
615 return FORMAT_TYPE_FLOAT;
616 case spv::OpTypeVector:
617 return get_fundamental_type(src, code[2]);
618 case spv::OpTypeMatrix:
619 return get_fundamental_type(src, code[2]);
620 case spv::OpTypeArray:
621 return get_fundamental_type(src, code[2]);
622 case spv::OpTypePointer:
623 return get_fundamental_type(src, code[3]);
624 default:
625 return FORMAT_TYPE_UNDEFINED;
626 }
627}
628
629
Chris Forbes5f362d02015-05-25 11:13:22 +1200630static bool
Chris Forbes0bf8fe12015-06-12 11:16:41 +1200631validate_vi_consistency(VkPipelineVertexInputCreateInfo const *vi)
632{
633 /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
634 * each binding should be specified only once.
635 */
636 std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
637 char str[1024];
638 bool pass = true;
639
640 for (unsigned i = 0; i < vi->bindingCount; i++) {
641 auto desc = &vi->pVertexBindingDescriptions[i];
642 auto & binding = bindings[desc->binding];
643 if (binding) {
644 sprintf(str, "Duplicate vertex input binding descriptions for binding %d", desc->binding);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600645 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INCONSISTENT_VI, "SC", str);
Chris Forbes0bf8fe12015-06-12 11:16:41 +1200646 pass = false;
647 }
648 else {
649 binding = desc;
650 }
651 }
652
653 return pass;
654}
655
656
/* Validate the pipeline's vertex-input attribute descriptions against the
 * vertex shader's Input-class interface variables, matching by location.
 * Missing attributes for consumed inputs and fundamental-type mismatches are
 * errors; unconsumed attributes are warnings. `vi` may be NULL (treated as no
 * attributes). Returns true when no hard errors were found. */
static bool
validate_vi_against_vs_inputs(VkPipelineVertexInputCreateInfo const *vi, shader_source const *vs)
{
    std::map<uint32_t, interface_var> inputs;
    /* we collect builtin inputs, but they will never appear in the VI state --
     * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
     */
    std::map<uint32_t, interface_var> builtin_inputs;
    char str[1024];
    bool pass = true;

    collect_interface_by_location(vs, spv::StorageClassInput, inputs, builtin_inputs);

    /* Build index by location */
    std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
    if (vi) {
        for (unsigned i = 0; i < vi->attributeCount; i++)
            attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
    }

    auto it_a = attribs.begin();
    auto it_b = inputs.begin();

    /* both maps are location-sorted; walk them in parallel, treating an
     * exhausted side as "past the end" via the *_at_end sentinels */
    while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
        bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
        bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
        auto a_first = a_at_end ? 0 : it_a->first;
        auto b_first = b_at_end ? 0 : it_b->first;
        if (b_at_end || a_first < b_first) {
            /* attribute provided but never read: warning only */
            sprintf(str, "Vertex attribute at location %d not consumed by VS", a_first);
            layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
            it_a++;
        }
        else if (a_at_end || b_first < a_first) {
            /* shader consumes an input no attribute supplies: error */
            sprintf(str, "VS consumes input at location %d but not provided", b_first);
            layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
            pass = false;
            it_b++;
        }
        else {
            unsigned attrib_type = get_format_type(it_a->second->format);
            unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);

            /* type checking */
            if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
                char vs_type[1024];
                describe_type(vs_type, vs, it_b->second.type_id);
                sprintf(str, "Attribute type of `%s` at location %d does not match VS input type of `%s`",
                        string_VkFormat(it_a->second->format), a_first, vs_type);
                layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
                pass = false;
            }

            /* OK! */
            it_a++;
            it_b++;
        }
    }

    return pass;
}
718
719
Chris Forbes5f362d02015-05-25 11:13:22 +1200720static bool
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200721validate_fs_outputs_against_cb(shader_source const *fs, VkPipelineCbStateCreateInfo const *cb)
722{
723 std::map<uint32_t, interface_var> outputs;
724 std::map<uint32_t, interface_var> builtin_outputs;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200725 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200726 bool pass = true;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200727
728 /* TODO: dual source blend index (spv::DecIndex, zero if not provided) */
729
Cody Northrop812b4612015-04-20 14:09:40 -0600730 collect_interface_by_location(fs, spv::StorageClassOutput, outputs, builtin_outputs);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200731
732 /* Check for legacy gl_FragColor broadcast: In this case, we should have no user-defined outputs,
733 * and all color attachment should be UNORM/SNORM/FLOAT.
734 */
735 if (builtin_outputs.find(spv::BuiltInFragColor) != builtin_outputs.end()) {
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200736 if (outputs.size()) {
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600737 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_FS_MIXED_BROADCAST, "SC",
Chris Forbes5c75afe2015-04-17 10:13:28 +1200738 "Should not have user-defined FS outputs when using broadcast");
Chris Forbes5f362d02015-05-25 11:13:22 +1200739 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200740 }
741
Ian Elliottf21f14b2015-04-17 11:05:04 -0600742 for (unsigned i = 0; i < cb->attachmentCount; i++) {
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200743 unsigned attachmentType = get_format_type(cb->pAttachments[i].format);
744 if (attachmentType == FORMAT_TYPE_SINT || attachmentType == FORMAT_TYPE_UINT) {
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600745 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
Chris Forbes5c75afe2015-04-17 10:13:28 +1200746 "CB format should not be SINT or UINT when using broadcast");
Chris Forbes5f362d02015-05-25 11:13:22 +1200747 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200748 }
749 }
750
Chris Forbes5f362d02015-05-25 11:13:22 +1200751 return pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200752 }
753
754 auto it = outputs.begin();
755 uint32_t attachment = 0;
756
757 /* Walk attachment list and outputs together -- this is a little overpowered since attachments
758 * are currently dense, but the parallel with matching between shader stages is nice.
759 */
760
Chris Forbes8802c992015-05-05 11:34:14 +1200761 while ((outputs.size() > 0 && it != outputs.end()) || attachment < cb->attachmentCount) {
scygan7a62cbe2015-06-01 19:48:11 +0200762 if (attachment == cb->attachmentCount || ( it != outputs.end() && it->first < attachment)) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200763 sprintf(str, "FS writes to output location %d with no matching attachment", it->first);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600764 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC", str);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200765 it++;
766 }
767 else if (it == outputs.end() || it->first > attachment) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200768 sprintf(str, "Attachment %d not written by FS", attachment);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600769 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC", str);
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200770 attachment++;
Chris Forbes5f362d02015-05-25 11:13:22 +1200771 pass = false;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200772 }
773 else {
Chris Forbes4b009002015-05-04 14:20:10 +1200774 unsigned output_type = get_fundamental_type(fs, it->second.type_id);
775 unsigned att_type = get_format_type(cb->pAttachments[attachment].format);
776
777 /* type checking */
778 if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
779 char fs_type[1024];
780 describe_type(fs_type, fs, it->second.type_id);
781 sprintf(str, "Attachment %d of type `%s` does not match FS output type of `%s`",
782 attachment, string_VkFormat(cb->pAttachments[attachment].format), fs_type);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600783 layerCbMsg(VK_DBG_REPORT_ERROR_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC", str);
Chris Forbes5f362d02015-05-25 11:13:22 +1200784 pass = false;
Chris Forbes4b009002015-05-04 14:20:10 +1200785 }
786
Chris Forbes5c75afe2015-04-17 10:13:28 +1200787 /* OK! */
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200788 it++;
789 attachment++;
790 }
791 }
Chris Forbes5f362d02015-05-25 11:13:22 +1200792
793 return pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200794}
795
796
/* Static per-stage metadata used when validating the interface between two
 * adjacent pipeline shader stages (names appear in validation messages). */
struct shader_stage_attributes {
    char const * const name;   /* human-readable stage name for report messages */
    bool arrayed_input;        /* whether this stage's inputs carry an extra per-vertex array dimension */
};
801
802
/* Per-stage attribute table, indexed directly by the VK_SHADER_STAGE_* enum
 * value (vertex through fragment; compute deliberately excluded). Entry order
 * must therefore match the enum's order. */
static shader_stage_attributes
shader_stage_attribs[VK_SHADER_STAGE_FRAGMENT + 1] = {
    { "vertex shader", false },
    { "tessellation control shader", true },   /* arrayed per-vertex inputs */
    { "tessellation evaluation shader", false },
    { "geometry shader", true },               /* arrayed per-vertex inputs */
    { "fragment shader", false },
};
811
812
Chris Forbesf1060ca2015-06-04 20:23:00 +1200813static bool
814validate_graphics_pipeline(VkGraphicsPipelineCreateInfo const *pCreateInfo)
Chris Forbes60540932015-04-08 10:15:35 +1200815{
Chris Forbes8f600932015-04-08 10:16:45 +1200816 /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
817 * before trying to do anything more: */
818
Chris Forbes4453c772015-06-05 15:01:08 +1200819 shader_source const *shaders[VK_SHADER_STAGE_FRAGMENT + 1]; /* exclude CS */
820 memset(shaders, 0, sizeof(shaders));
Chris Forbes8f600932015-04-08 10:16:45 +1200821 VkPipelineCbStateCreateInfo const *cb = 0;
822 VkPipelineVertexInputCreateInfo const *vi = 0;
Chris Forbes5c75afe2015-04-17 10:13:28 +1200823 char str[1024];
Chris Forbes5f362d02015-05-25 11:13:22 +1200824 bool pass = true;
Chris Forbes8f600932015-04-08 10:16:45 +1200825
Chris Forbes1ed0f982015-05-29 14:55:18 +1200826 loader_platform_thread_lock_mutex(&globalLock);
827
Chris Forbes8f600932015-04-08 10:16:45 +1200828 for (auto stage = pCreateInfo; stage; stage = (decltype(stage))stage->pNext) {
829 if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
830 auto shader_stage = (VkPipelineShaderStageCreateInfo const *)stage;
831
Chris Forbes4453c772015-06-05 15:01:08 +1200832 if (shader_stage->shader.stage < VK_SHADER_STAGE_VERTEX || shader_stage->shader.stage > VK_SHADER_STAGE_FRAGMENT) {
Chris Forbes5c75afe2015-04-17 10:13:28 +1200833 sprintf(str, "Unknown shader stage %d\n", shader_stage->shader.stage);
Courtney Goeltzenleuchter1c7c65d2015-06-10 17:39:03 -0600834 layerCbMsg(VK_DBG_REPORT_WARN_BIT, (VkObjectType) 0, NULL, 0, SHADER_CHECKER_UNKNOWN_STAGE, "SC", str);
Chris Forbes5c75afe2015-04-17 10:13:28 +1200835 }
Chris Forbes4453c772015-06-05 15:01:08 +1200836 else {
837 shaders[shader_stage->shader.stage] = shader_map[(void *)(shader_stage->shader.shader)];
838 }
Chris Forbes8f600932015-04-08 10:16:45 +1200839 }
840 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_CB_STATE_CREATE_INFO) {
841 cb = (VkPipelineCbStateCreateInfo const *)stage;
842 }
843 else if (stage->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_CREATE_INFO) {
844 vi = (VkPipelineVertexInputCreateInfo const *)stage;
845 }
846 }
847
Chris Forbes0bf8fe12015-06-12 11:16:41 +1200848 if (vi) {
849 pass = validate_vi_consistency(vi) && pass;
850 }
851
Chris Forbes4453c772015-06-05 15:01:08 +1200852 if (shaders[VK_SHADER_STAGE_VERTEX] && shaders[VK_SHADER_STAGE_VERTEX]->is_spirv) {
853 pass = validate_vi_against_vs_inputs(vi, shaders[VK_SHADER_STAGE_VERTEX]) && pass;
Chris Forbesfcd05f12015-04-08 10:36:37 +1200854 }
855
Chris Forbes4453c772015-06-05 15:01:08 +1200856 /* TODO: enforce rules about present combinations of shaders */
857 int producer = VK_SHADER_STAGE_VERTEX;
858 int consumer = VK_SHADER_STAGE_GEOMETRY;
859
860 while (!shaders[producer] && producer != VK_SHADER_STAGE_FRAGMENT) {
861 producer++;
862 consumer++;
Chris Forbesbb164b62015-04-08 10:19:16 +1200863 }
864
Tony Barbour4eb3cd12015-06-11 15:04:25 -0600865 for (; producer != VK_SHADER_STAGE_FRAGMENT && consumer <= VK_SHADER_STAGE_FRAGMENT; consumer++) {
Chris Forbes4453c772015-06-05 15:01:08 +1200866 assert(shaders[producer]);
867 if (shaders[consumer]) {
868 if (shaders[producer]->is_spirv && shaders[consumer]->is_spirv) {
869 pass = validate_interface_between_stages(shaders[producer], shader_stage_attribs[producer].name,
870 shaders[consumer], shader_stage_attribs[consumer].name,
871 shader_stage_attribs[consumer].arrayed_input) && pass;
872 }
873
874 producer = consumer;
875 }
876 }
877
878 if (shaders[VK_SHADER_STAGE_FRAGMENT] && shaders[VK_SHADER_STAGE_FRAGMENT]->is_spirv && cb) {
879 pass = validate_fs_outputs_against_cb(shaders[VK_SHADER_STAGE_FRAGMENT], cb) && pass;
Chris Forbes9b9f5fe2015-04-08 10:37:20 +1200880 }
881
Chris Forbes1ed0f982015-05-29 14:55:18 +1200882 loader_platform_thread_unlock_mutex(&globalLock);
Chris Forbesf1060ca2015-06-04 20:23:00 +1200883 return pass;
884}
885
886
Chris Forbesd0f7f7c2015-06-04 20:27:09 +1200887VK_LAYER_EXPORT VkResult VKAPI
888vkCreateGraphicsPipeline(VkDevice device,
889 const VkGraphicsPipelineCreateInfo *pCreateInfo,
890 VkPipeline *pPipeline)
Chris Forbesf1060ca2015-06-04 20:23:00 +1200891{
892 bool pass = validate_graphics_pipeline(pCreateInfo);
Chris Forbes5f362d02015-05-25 11:13:22 +1200893
894 if (pass) {
895 /* The driver is allowed to crash if passed junk. Only actually create the
896 * pipeline if we didn't run into any showstoppers above.
897 */
Jon Ashburn5a10d212015-06-01 10:02:09 -0600898 return device_dispatch_table(device)->CreateGraphicsPipeline(device, pCreateInfo, pPipeline);
Chris Forbes5f362d02015-05-25 11:13:22 +1200899 }
900 else {
901 return VK_ERROR_UNKNOWN;
902 }
Chris Forbes60540932015-04-08 10:15:35 +1200903}
904
905
Chris Forbesd0f7f7c2015-06-04 20:27:09 +1200906VK_LAYER_EXPORT VkResult VKAPI
907vkCreateGraphicsPipelineDerivative(VkDevice device,
908 const VkGraphicsPipelineCreateInfo *pCreateInfo,
909 VkPipeline basePipeline,
910 VkPipeline *pPipeline)
911{
912 bool pass = validate_graphics_pipeline(pCreateInfo);
913
914 if (pass) {
915 /* The driver is allowed to crash if passed junk. Only actually create the
916 * pipeline if we didn't run into any showstoppers above.
917 */
Jon Ashburn5a10d212015-06-01 10:02:09 -0600918 return device_dispatch_table(device)->CreateGraphicsPipelineDerivative(device, pCreateInfo, basePipeline, pPipeline);
Chris Forbesd0f7f7c2015-06-04 20:27:09 +1200919 }
920 else {
921 return VK_ERROR_UNKNOWN;
922 }
923}
924
925
/* hook DestroyDevice to remove tableMap entry */
VK_LAYER_EXPORT VkResult VKAPI vkDestroyDevice(VkDevice device)
{
    /* Grab the dispatch key before destruction -- the device handle is no
     * longer usable after DestroyDevice returns. */
    dispatch_key key = get_dispatch_key(device);
    VkResult res = device_dispatch_table(device)->DestroyDevice(device);
    destroy_device_dispatch_table(key);
    return res;
}
934
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600935VkResult VKAPI vkCreateInstance(
936 const VkInstanceCreateInfo* pCreateInfo,
937 VkInstance* pInstance)
938{
939
940 loader_platform_thread_once(&g_initOnce, initLayer);
941 /*
942 * For layers, the pInstance has already been filled out
943 * by the loader so that dispatch table is available.
944 */
Jon Ashburnade3bee2015-06-10 16:43:31 -0600945 VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(*pInstance);
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600946
947 VkResult result = pTable->CreateInstance(pCreateInfo, pInstance);
948
949 if (result == VK_SUCCESS) {
950 enable_debug_report(pCreateInfo->extensionCount, pCreateInfo->pEnabledExtensions);
Courtney Goeltzenleuchterf4a2eba2015-06-08 14:58:39 -0600951
952 debug_report_init_instance_extension_dispatch_table(
953 pTable,
954 pTable->GetInstanceProcAddr,
955 *pInstance);
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600956 }
957 return result;
958}
959
Jon Ashburn17f37372015-05-19 16:34:53 -0600960/* hook DestroyInstance to remove tableInstanceMap entry */
961VK_LAYER_EXPORT VkResult VKAPI vkDestroyInstance(VkInstance instance)
962{
Courtney Goeltzenleuchter9f171942015-06-13 21:22:12 -0600963 dispatch_key key = get_dispatch_key(instance);
Jon Ashburn5a10d212015-06-01 10:02:09 -0600964 VkResult res = instance_dispatch_table(instance)->DestroyInstance(instance);
Courtney Goeltzenleuchter9f171942015-06-13 21:22:12 -0600965 destroy_instance_dispatch_table(key);
Jon Ashburn17f37372015-05-19 16:34:53 -0600966 return res;
967}
Chris Forbesb65ba352015-05-25 11:12:59 +1200968
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600969VK_LAYER_EXPORT VkResult VKAPI vkDbgCreateMsgCallback(
970 VkInstance instance,
971 VkFlags msgFlags,
972 const PFN_vkDbgMsgCallback pfnMsgCallback,
973 void* pUserData,
974 VkDbgMsgCallback* pMsgCallback)
975{
Courtney Goeltzenleuchter9f171942015-06-13 21:22:12 -0600976 VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600977 return layer_create_msg_callback(instance, pTable, msgFlags, pfnMsgCallback, pUserData, pMsgCallback);
978}
979
980VK_LAYER_EXPORT VkResult VKAPI vkDbgDestroyMsgCallback(
981 VkInstance instance,
982 VkDbgMsgCallback msgCallback)
983{
Courtney Goeltzenleuchter9f171942015-06-13 21:22:12 -0600984 VkLayerInstanceDispatchTable *pTable = instance_dispatch_table(instance);
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -0600985 return layer_destroy_msg_callback(instance, pTable, msgCallback);
986}
987
Jon Ashburn1245cec2015-05-18 13:20:15 -0600988VK_LAYER_EXPORT void * VKAPI vkGetDeviceProcAddr(VkDevice device, const char* pName)
Chris Forbesaab9d112015-04-02 13:22:31 +1300989{
Jon Ashburn1245cec2015-05-18 13:20:15 -0600990 if (device == NULL)
Chris Forbesaab9d112015-04-02 13:22:31 +1300991 return NULL;
992
Chris Forbes1b466bd2015-04-15 06:59:41 +1200993 loader_platform_thread_once(&g_initOnce, initLayer);
994
Jon Ashburn4f2575f2015-05-28 16:25:02 -0600995 /* loader uses this to force layer initialization; device object is wrapped */
996 if (!strcmp("vkGetDeviceProcAddr", pName)) {
Jon Ashburn5a10d212015-06-01 10:02:09 -0600997 initDeviceTable((const VkBaseLayerObject *) device);
Jon Ashburn4f2575f2015-05-28 16:25:02 -0600998 return (void *) vkGetDeviceProcAddr;
999 }
1000
Chris Forbesaab9d112015-04-02 13:22:31 +13001001#define ADD_HOOK(fn) \
1002 if (!strncmp(#fn, pName, sizeof(#fn))) \
1003 return (void *) fn
1004
Chris Forbesaab9d112015-04-02 13:22:31 +13001005 ADD_HOOK(vkCreateShader);
Jon Ashburn17f37372015-05-19 16:34:53 -06001006 ADD_HOOK(vkDestroyDevice);
Chris Forbes60540932015-04-08 10:15:35 +12001007 ADD_HOOK(vkCreateGraphicsPipeline);
Chris Forbesd0f7f7c2015-06-04 20:27:09 +12001008 ADD_HOOK(vkCreateGraphicsPipelineDerivative);
Jon Ashburn8198fd02015-05-18 09:08:41 -06001009#undef ADD_HOOK
Jon Ashburn5a10d212015-06-01 10:02:09 -06001010 VkLayerDispatchTable* pTable = device_dispatch_table(device);
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001011 if (pTable->GetDeviceProcAddr == NULL)
Chris Forbesaab9d112015-04-02 13:22:31 +13001012 return NULL;
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001013 return pTable->GetDeviceProcAddr(device, pName);
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001014}
1015
1016VK_LAYER_EXPORT void * VKAPI vkGetInstanceProcAddr(VkInstance inst, const char* pName)
1017{
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -06001018 void *fptr;
1019
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001020 if (inst == NULL)
1021 return NULL;
1022
Jon Ashburnd9564002015-05-07 10:27:37 -06001023 loader_platform_thread_once(&g_initOnce, initLayer);
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001024
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001025 if (!strcmp("vkGetInstanceProcAddr", pName)) {
Jon Ashburn5a10d212015-06-01 10:02:09 -06001026 initInstanceTable((const VkBaseLayerObject *) inst);
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001027 return (void *) vkGetInstanceProcAddr;
1028 }
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001029#define ADD_HOOK(fn) \
1030 if (!strncmp(#fn, pName, sizeof(#fn))) \
1031 return (void *) fn
1032
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -06001033 ADD_HOOK(vkCreateInstance);
Jon Ashburn17f37372015-05-19 16:34:53 -06001034 ADD_HOOK(vkDestroyInstance);
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001035 ADD_HOOK(vkGetGlobalExtensionInfo);
Jon Ashburnade3bee2015-06-10 16:43:31 -06001036 ADD_HOOK(vkGetPhysicalDeviceExtensionInfo);
Jon Ashburn8198fd02015-05-18 09:08:41 -06001037#undef ADD_HOOK
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001038
Courtney Goeltzenleuchter6c813dc2015-06-01 14:46:33 -06001039 fptr = msg_callback_get_proc_addr(pName);
1040 if (fptr)
1041 return fptr;
1042
Jon Ashburn5a10d212015-06-01 10:02:09 -06001043 VkLayerInstanceDispatchTable* pTable = instance_dispatch_table(inst);
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001044 if (pTable->GetInstanceProcAddr == NULL)
Jon Ashburn79b78ac2015-05-05 14:22:52 -06001045 return NULL;
Jon Ashburn4f2575f2015-05-28 16:25:02 -06001046 return pTable->GetInstanceProcAddr(inst, pName);
Chris Forbesaab9d112015-04-02 13:22:31 +13001047}