/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Mike Stroyan <stroyan@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 */

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"

#include <cinttypes>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unordered_map>

#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vulkan/vk_layer.h"

#include "object_tracker.h"

namespace object_tracker {

static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
}

// Add a new queue to the global queue info map
static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    auto queueItem = device_data->queue_info_map.find(queue);
    if (queueItem == device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
        if (p_queue_info != NULL) {
            memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
            p_queue_info->queue = queue;
            p_queue_info->queue_node_index = queue_node_index;
            device_data->queue_info_map[queue] = p_queue_info;
        } else {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                    reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
                    "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
        }
    }
}

// Destroy memRef lists and free all memory
static void DestroyQueueDataStructures(VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    for (auto queue_item : device_data->queue_info_map) {
        delete queue_item.second;
    }
    device_data->queue_info_map.clear();

    // Destroy the items in the queue map
    auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
    while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        uint32_t obj_index = queue->second->object_type;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[obj_index] > 0);
        device_data->num_objects[obj_index]--;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
                __LINE__, OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
                queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
        delete queue->second;
        queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
    }
}

// Check Queue type flags for selected queue operations
static void ValidateQueueFlags(VkQueue queue, const char *function) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    auto queue_item = device_data->queue_info_map.find(queue);
    if (queue_item != device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *pQueueInfo = queue_item->second;
        if (pQueueInfo != NULL) {
            layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
            if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
                0) {
                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                        reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
                        "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
            }
        }
    }
}

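// Record a newly allocated command buffer in the object map, remembering its parent command pool and
// whether it was allocated at secondary level.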
static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                                  VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<const uint64_t>(command_buffer), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
    pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
    if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
        pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
    } else {
        pNewObjNode->status = OBJSTATUS_NONE;
    }
    device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

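// Verify that a command buffer being freed is known to the tracker and was allocated from the given
// command pool; returns true if the triggering call should be skipped.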
static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
    if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
        OBJTRACK_NODE *pNode =
            device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
                                 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
                                 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ".",
                                 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(command_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove command buffer obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

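// Record a newly allocated descriptor set in the object map, remembering its parent descriptor pool.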
static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
                                  VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
            reinterpret_cast<uint64_t &>(descriptor_set));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
    device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
        pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

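// Verify that a descriptor set being freed is known to the tracker and was allocated from the given
// descriptor pool; returns true if the triggering call should be skipped.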
static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
    auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
    if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = dsItem->second;

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
                                 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
                                 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ".",
                                 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(descriptor_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove descriptor set obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

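// Track a queue obtained from the device, creating a tracking node only if one does not already exist
// (the same queue may be retrieved more than once).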
static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
            OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t>(vkObj));

    OBJTRACK_NODE *p_obj_node = NULL;
    auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
    if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        p_obj_node = new OBJTRACK_NODE;
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
        device_data->num_objects[object_type]++;
        device_data->num_total_objects++;
    } else {
        p_obj_node = queue_item->second;
    }
    p_obj_node->object_type = object_type;
    p_obj_node->status = OBJSTATUS_NONE;
    p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
}

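// Track an image owned by a swapchain; these are kept in a separate map keyed by image handle, with the
// owning swapchain recorded as the parent object.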
static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
            reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
            reinterpret_cast<uint64_t &>(swapchain_image));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
    device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
}

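// Add a dispatchable (pointer-based) handle to the object map and update the object counters.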
template <typename T1, typename T2>
static void CreateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t>(object));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t>(object);
    instance_data->object_map[object_type][reinterpret_cast<uint64_t>(object)] = pNewObjNode;
    instance_data->num_objects[object_type]++;
    instance_data->num_total_objects++;
}

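// Add a non-dispatchable (64-bit) handle to the object map and update the object counters.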
template <typename T1, typename T2>
static void CreateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t &>(object),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t &>(object));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(object);
    device_data->object_map[object_type][reinterpret_cast<uint64_t &>(object)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

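// Remove a dispatchable handle from the object map, or report an error if it was never tracked.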
template <typename T1, typename T2>
static void DestroyDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    uint64_t object_handle = reinterpret_cast<uint64_t>(object);

    auto item = instance_data->object_map[object_type].find(object_handle);
    if (item != instance_data->object_map[object_type].end()) {
        OBJTRACK_NODE *pNode = item->second;
        assert(instance_data->num_total_objects > 0);
        instance_data->num_total_objects--;
        assert(instance_data->num_objects[object_type] > 0);
        instance_data->num_objects[pNode->object_type]--;

        log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
                OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                object_name[pNode->object_type], reinterpret_cast<uint64_t>(object), instance_data->num_total_objects,
                instance_data->num_objects[pNode->object_type], object_name[pNode->object_type]);

        delete pNode;
        instance_data->object_map[object_type].erase(item);
    } else {
        log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
                OBJTRACK_UNKNOWN_OBJECT, LayerName,
                "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
                object_name[object_type], object_handle);
    }
}

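// Remove a non-dispatchable handle from the object map, or report an error if it was never tracked.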
template <typename T1, typename T2>
static void DestroyNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    uint64_t object_handle = reinterpret_cast<uint64_t &>(object);

    auto item = device_data->object_map[object_type].find(object_handle);
    if (item != device_data->object_map[object_type].end()) {
        OBJTRACK_NODE *pNode = item->second;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[pNode->object_type] > 0);
        device_data->num_objects[pNode->object_type]--;

        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
                OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
                device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);

        delete pNode;
        device_data->object_map[object_type].erase(item);
    } else {
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
                OBJTRACK_UNKNOWN_OBJECT, LayerName,
                "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
                object_name[object_type], object_handle);
    }
}

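// Report an error (and request a skip) if a dispatchable handle is not present in the object map.
// VK_NULL_HANDLE is accepted when null_allowed is true.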
template <typename T1, typename T2>
static bool ValidateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                       bool null_allowed) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    if (instance_data->object_map[object_type].find(reinterpret_cast<uint64_t>(object)) ==
        instance_data->object_map[object_type].end()) {
        return log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
                       __LINE__, OBJTRACK_INVALID_OBJECT, LayerName, "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type],
                       reinterpret_cast<uint64_t>(object));
    }
    return false;
}

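// Report an error (and request a skip) if a non-dispatchable handle is not present in the object map
// or, for images, in the swapchain image map. VK_NULL_HANDLE is accepted when null_allowed is true.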
template <typename T1, typename T2>
static bool ValidateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                          bool null_allowed) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    if (device_data->object_map[object_type].find(reinterpret_cast<uint64_t &>(object)) ==
        device_data->object_map[object_type].end()) {
        // If object is an image, also look for it in the swapchain image map
        if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
            (device_data->swapchainImageMap.find(reinterpret_cast<uint64_t &>(object)) == device_data->swapchainImageMap.end())) {
            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
                           reinterpret_cast<uint64_t &>(object), __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
                           "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type], reinterpret_cast<uint64_t &>(object));
        }
    }
    return false;
}

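// Report, and stop tracking, all objects of the given type that are still alive for this device.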
static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
        OBJTRACK_NODE *object_info = item->second;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName,
                "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
        item = device_data->object_map[object_type].erase(item);
    }
}

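// At instance destruction, report any devices (and their child objects) that were never destroyed,
// then tear down the layer's per-instance state.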
VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);

    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);

    // Enable the temporary callback(s) here to catch cleanup issues:
    bool callback_setup = false;
    if (instance_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                        instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);

    DestroyDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);

    // Report any remaining undestroyed devices and their child objects
    for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
         iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
        OBJTRACK_NODE *pNode = iit->second;

        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);

        log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);

        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
        // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
    }
    instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();

    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
    pInstanceTable->DestroyInstance(instance, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
    }
    if (instance_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);

    instanceExtMap.erase(pInstanceTable);
    lock.unlock();
    ot_instance_table_map.erase(key);
}

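// At device destruction, report any child objects that were never destroyed, then clean up the
// per-device tracking state and dispatch table.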
VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);
    ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    DestroyDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT);

    // Report any remaining objects associated with this VkDevice
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);

    // Clean up Queue's MemRef Linked Lists
    DestroyQueueDataStructures(device);

    lock.unlock();

    dispatch_key key = get_dispatch_key(device);
    VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
    pDisp->DestroyDevice(device, pAllocator);
    ot_device_table_map.erase(key);
}

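// The entry points below follow a common pattern: validate the handles passed in while holding the global
// lock, return early (VK_ERROR_VALIDATION_FAILED_EXT for functions that return a result) if validation
// requested a skip, forward the call through the dispatch table, and track any objects the call created.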
VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                             VkFormatProperties *pFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                      VkImageType type, VkImageTiling tiling,
                                                                      VkImageUsageFlags usage, VkImageCreateFlags flags,
                                                                      VkImageFormatProperties *pImageFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_instance_table_map, physicalDevice)
            ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
                                                                    VkExtensionProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                              VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
        if (pSubmits) {
            for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
                if (pSubmits[idx0].pCommandBuffers) {
                    for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
                        skip_call |= ValidateDispatchableObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
                                                                VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
                    }
                }
                if (pSubmits[idx0].pSignalSemaphores) {
                    for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
                        skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
                if (pSubmits[idx0].pWaitSemaphores) {
                    for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
                        skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
            }
        }
        if (queue) {
            skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                       const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                            const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                                     VkDeviceSize *pCommittedMemoryInBytes) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}

VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
                                                VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                       VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
                                                            VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                        VkImageType type, VkSampleCountFlagBits samples,
                                                                        VkImageUsageFlags usage, VkImageTiling tiling,
                                                                        uint32_t *pPropertyCount,
                                                                        VkSparseImageFormatProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
                                                       pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
                                             uint64_t timeout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
                                                   size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pCreateInfo) {
            skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
                                                     VkSubresourceLayout *pLayout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pCreateInfo) {
            skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
}

1262VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1263 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1264 bool skip_call = false;
1265 {
1266 std::lock_guard<std::mutex> lock(global_lock);
1267 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1268 }
1269 if (skip_call) {
1270 return VK_ERROR_VALIDATION_FAILED_EXT;
1271 }
1272 VkResult result =
1273 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1274 {
1275 std::lock_guard<std::mutex> lock(global_lock);
1276 if (result == VK_SUCCESS) {
1277 CreateNonDispatchableObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
1278 }
1279 }
1280 return result;
1281}
1282
1283VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1284 const VkAllocationCallbacks *pAllocator) {
1285 bool skip_call = false;
1286 {
1287 std::lock_guard<std::mutex> lock(global_lock);
1288 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1289 skip_call |= ValidateNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
1290 }
1291 if (skip_call) {
1292 return;
1293 }
1294 {
1295 std::lock_guard<std::mutex> lock(global_lock);
1296 DestroyNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
1297 }
1298 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1299}
1300
1301VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1302 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1303 bool skip_call = false;
1304 {
1305 std::lock_guard<std::mutex> lock(global_lock);
1306 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1307 }
1308 if (skip_call) {
1309 return VK_ERROR_VALIDATION_FAILED_EXT;
1310 }
1311 VkResult result =
1312 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1313 {
1314 std::lock_guard<std::mutex> lock(global_lock);
1315 if (result == VK_SUCCESS) {
1316 CreateNonDispatchableObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
1317 }
1318 }
1319 return result;
1320}
1321
1322VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1323 const VkAllocationCallbacks *pAllocator) {
1324 bool skip_call = false;
1325 {
1326 std::lock_guard<std::mutex> lock(global_lock);
1327 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1328 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1329 }
1330 if (skip_call) {
1331 return;
1332 }
1333 {
1334 std::lock_guard<std::mutex> lock(global_lock);
1335 DestroyNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
1336 }
1337 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1338}
1339
1340VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1341 void *pData) {
1342 bool skip_call = false;
1343 {
1344 std::lock_guard<std::mutex> lock(global_lock);
1345 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1346 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1347 }
1348 if (skip_call) {
1349 return VK_ERROR_VALIDATION_FAILED_EXT;
1350 }
1351 VkResult result =
1352 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1353 return result;
1354}
1355
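// MergePipelineCaches validates the destination cache plus every entry of pSrcCaches before dispatching.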
1356VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1357 const VkPipelineCache *pSrcCaches) {
1358 bool skip_call = false;
1359 {
1360 std::lock_guard<std::mutex> lock(global_lock);
1361 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1362 skip_call |= ValidateNonDispatchableObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1363 if (pSrcCaches) {
1364 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1365 skip_call |=
1366 ValidateNonDispatchableObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1367 }
1368 }
1369 }
1370 if (skip_call) {
1371 return VK_ERROR_VALIDATION_FAILED_EXT;
1372 }
1373 VkResult result =
1374 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1375 return result;
1376}
1377
1378VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1379 bool skip_call = false;
1380 {
1381 std::lock_guard<std::mutex> lock(global_lock);
1382 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1383 skip_call |= ValidateNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1384 }
1385 if (skip_call) {
1386 return;
1387 }
1388 {
1389 std::lock_guard<std::mutex> lock(global_lock);
1390 DestroyNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
1391 }
1392 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1393}
1394
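// CreatePipelineLayout additionally validates each descriptor set layout handle listed in
// pCreateInfo->pSetLayouts.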
1395VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1396 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1397 bool skip_call = false;
1398 {
1399 std::lock_guard<std::mutex> lock(global_lock);
1400 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1401 if (pCreateInfo) {
1402 if (pCreateInfo->pSetLayouts) {
1403 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
1404 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pSetLayouts[idx0],
1405 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1406 }
1407 }
1408 }
1409 }
1410 if (skip_call) {
1411 return VK_ERROR_VALIDATION_FAILED_EXT;
1412 }
1413 VkResult result =
1414 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1415 {
1416 std::lock_guard<std::mutex> lock(global_lock);
1417 if (result == VK_SUCCESS) {
1418 CreateNonDispatchableObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
1419 }
1420 }
1421 return result;
1422}
1423
1424VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1425 const VkAllocationCallbacks *pAllocator) {
1426 bool skip_call = false;
1427 {
1428 std::lock_guard<std::mutex> lock(global_lock);
1429 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1430 skip_call |= ValidateNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1431 }
1432 if (skip_call) {
1433 return;
1434 }
1435 {
1436 std::lock_guard<std::mutex> lock(global_lock);
1437 DestroyNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
1438 }
1439 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1440}
1441
1442VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1443 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1444 bool skip_call = false;
1445 {
1446 std::lock_guard<std::mutex> lock(global_lock);
1447 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1448 }
1449 if (skip_call) {
1450 return VK_ERROR_VALIDATION_FAILED_EXT;
1451 }
1452 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1453 {
1454 std::lock_guard<std::mutex> lock(global_lock);
1455 if (result == VK_SUCCESS) {
1456 CreateNonDispatchableObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
1457 }
1458 }
1459 return result;
1460}
1461
1462VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1463 bool skip_call = false;
1464 {
1465 std::lock_guard<std::mutex> lock(global_lock);
1466 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1467 skip_call |= ValidateNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1468 }
1469 if (skip_call) {
1470 return;
1471 }
1472 {
1473 std::lock_guard<std::mutex> lock(global_lock);
1474 DestroyNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
1475 }
1476 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1477}
1478
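// CreateDescriptorSetLayout walks every binding and validates any immutable sampler handles supplied in
// pCreateInfo->pBindings[n].pImmutableSamplers.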
1479VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1480 const VkAllocationCallbacks *pAllocator,
1481 VkDescriptorSetLayout *pSetLayout) {
1482 bool skip_call = false;
1483 {
1484 std::lock_guard<std::mutex> lock(global_lock);
1485 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1486 if (pCreateInfo) {
1487 if (pCreateInfo->pBindings) {
1488 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1489 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1490 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1491 skip_call |=
1492 ValidateNonDispatchableObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
1493 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1494 }
1495 }
1496 }
1497 }
1498 }
1499 }
1500 if (skip_call) {
1501 return VK_ERROR_VALIDATION_FAILED_EXT;
1502 }
1503 VkResult result =
1504 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1505 {
1506 std::lock_guard<std::mutex> lock(global_lock);
1507 if (result == VK_SUCCESS) {
1508 CreateNonDispatchableObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
1509 }
1510 }
1511 return result;
1512}
1513
1514VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1515 const VkAllocationCallbacks *pAllocator) {
1516 bool skip_call = false;
1517 {
1518 std::lock_guard<std::mutex> lock(global_lock);
1519 skip_call |= ValidateNonDispatchableObject(device, descriptorSetLayout,
1520 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1521 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1522 }
1523 if (skip_call) {
1524 return;
1525 }
1526 {
1527 std::lock_guard<std::mutex> lock(global_lock);
1528 DestroyNonDispatchableObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
1529 }
1530 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1531}
1532
1533VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1534 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1535 bool skip_call = false;
1536 {
1537 std::lock_guard<std::mutex> lock(global_lock);
1538 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1539 }
1540 if (skip_call) {
1541 return VK_ERROR_VALIDATION_FAILED_EXT;
1542 }
1543 VkResult result =
1544 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1545 {
1546 std::lock_guard<std::mutex> lock(global_lock);
1547 if (result == VK_SUCCESS) {
1548 CreateNonDispatchableObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
1549 }
1550 }
1551 return result;
1552}
1553
1554VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1555 VkDescriptorPoolResetFlags flags) {
1556 bool skip_call = false;
1557 {
1558 std::lock_guard<std::mutex> lock(global_lock);
1559 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1560 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1561 }
1562 if (skip_call) {
1563 return VK_ERROR_VALIDATION_FAILED_EXT;
1564 }
1565 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1566 return result;
1567}
1568
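// UpdateDescriptorSets inspects each write's descriptorType to decide which part of the descriptor info is
// meaningful: buffer descriptors validate pBufferInfo[n].buffer, image and sampler descriptors validate
// pImageInfo[n].imageView and .sampler, and texel-buffer descriptors validate pTexelBufferView[n]
// (null handles permitted for the latter). Copy operations validate both srcSet and dstSet.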
1569VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1570 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
1571 const VkCopyDescriptorSet *pDescriptorCopies) {
1572 bool skip_call = false;
1573 {
1574 std::lock_guard<std::mutex> lock(global_lock);
1575 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1576 if (pDescriptorCopies) {
1577 for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
1578 if (pDescriptorCopies[idx0].dstSet) {
1579 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].dstSet,
1580 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1581 }
1582 if (pDescriptorCopies[idx0].srcSet) {
1583 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].srcSet,
1584 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1585 }
1586 }
1587 }
1588 if (pDescriptorWrites) {
1589 for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
1590 if (pDescriptorWrites[idx1].dstSet) {
1591 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].dstSet,
1592 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1593 }
1594 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1595 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
1596 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
1597 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
1598 for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
1599 if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
1600 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
1601 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1602 }
1603 }
1604 }
1605 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
1606 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
1607 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
1608 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
1609 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
1610 for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
1611 if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
1612 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
1613 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1614 }
1615 if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
1616 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
1617 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1618 }
1619 }
1620 }
1621 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1622 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
1623 for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
1624 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
1625 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
1626 }
1627 }
1628 }
1629 }
1630 }
1631 if (skip_call) {
1632 return;
1633 }
1634 get_dispatch_table(ot_device_table_map, device)
1635 ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
1636}
1637
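// CreateFramebuffer validates every image view in pCreateInfo->pAttachments as well as the render pass the
// framebuffer is created against.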
1638VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1639 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1640 bool skip_call = false;
1641 {
1642 std::lock_guard<std::mutex> lock(global_lock);
1643 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1644 if (pCreateInfo) {
1645 if (pCreateInfo->pAttachments) {
1646 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
1647 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pAttachments[idx0],
1648 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1649 }
1650 }
1651 if (pCreateInfo->renderPass) {
1652 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->renderPass,
1653 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1654 }
1655 }
1656 }
1657 if (skip_call) {
1658 return VK_ERROR_VALIDATION_FAILED_EXT;
1659 }
1660 VkResult result =
1661 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1662 {
1663 std::lock_guard<std::mutex> lock(global_lock);
1664 if (result == VK_SUCCESS) {
1665 CreateNonDispatchableObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
1666 }
1667 }
1668 return result;
1669}
1670
1671VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1672 bool skip_call = false;
1673 {
1674 std::lock_guard<std::mutex> lock(global_lock);
1675 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1676 skip_call |= ValidateNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
1677 }
1678 if (skip_call) {
1679 return;
1680 }
1681 {
1682 std::lock_guard<std::mutex> lock(global_lock);
1683 DestroyNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
1684 }
1685 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1686}
1687
1688VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1689 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1690 bool skip_call = false;
1691 {
1692 std::lock_guard<std::mutex> lock(global_lock);
1693 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1694 }
1695 if (skip_call) {
1696 return VK_ERROR_VALIDATION_FAILED_EXT;
1697 }
1698 VkResult result =
1699 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1700 {
1701 std::lock_guard<std::mutex> lock(global_lock);
1702 if (result == VK_SUCCESS) {
1703 CreateNonDispatchableObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
1704 }
1705 }
1706 return result;
1707}
1708
1709VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1710 bool skip_call = false;
1711 {
1712 std::lock_guard<std::mutex> lock(global_lock);
1713 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1714 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1715 }
1716 if (skip_call) {
1717 return;
1718 }
1719 {
1720 std::lock_guard<std::mutex> lock(global_lock);
1721 DestroyNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
1722 }
1723 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1724}
1725
1726VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1727 bool skip_call = false;
1728 {
1729 std::lock_guard<std::mutex> lock(global_lock);
1730 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1731 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1732 }
1733 if (skip_call) {
1734 return;
1735 }
1736 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1737}
1738
1739VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1740 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1741 bool skip_call = false;
1742 {
1743 std::lock_guard<std::mutex> lock(global_lock);
1744 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1745 }
1746 if (skip_call) {
1747 return VK_ERROR_VALIDATION_FAILED_EXT;
1748 }
1749 VkResult result =
1750 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1751 {
1752 std::lock_guard<std::mutex> lock(global_lock);
1753 if (result == VK_SUCCESS) {
1754 CreateNonDispatchableObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
1755 }
1756 }
1757 return result;
1758}
1759
1760VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1761 bool skip_call = false;
1762 {
1763 std::lock_guard<std::mutex> lock(global_lock);
1764 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1765 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1766 }
1767 if (skip_call) {
1768 return VK_ERROR_VALIDATION_FAILED_EXT;
1769 }
1770 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1771 return result;
1772}
1773
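// BeginCommandBuffer also checks the inheritance info of secondary command buffers: the framebuffer and
// renderPass named there may legitimately be VK_NULL_HANDLE, so they are validated with null_allowed set
// to true. A minimal sketch of the misuse this is meant to catch (hypothetical handles; a stale,
// already-destroyed framebuffer would be expected to be reported as an invalid object and the call to
// return VK_ERROR_VALIDATION_FAILED_EXT):
//
//     VkCommandBufferInheritanceInfo inheritance = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO};
//     inheritance.renderPass = render_pass;
//     inheritance.framebuffer = stale_framebuffer;  // destroyed earlier
//     VkCommandBufferBeginInfo begin_info = {VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};
//     begin_info.flags = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
//     begin_info.pInheritanceInfo = &inheritance;
//     vkBeginCommandBuffer(secondary_command_buffer, &begin_info);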
1774VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1775 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1776 bool skip_call = false;
1777 {
1778 std::lock_guard<std::mutex> lock(global_lock);
1779 skip_call |=
1780 ValidateDispatchableObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1781 if (begin_info) {
1782            OBJTRACK_NODE *pNode =
1783                device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
1784 if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
1785 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
1786 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
1787 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
1788 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1789 }
1790 }
1791 }
1792 if (skip_call) {
1793 return VK_ERROR_VALIDATION_FAILED_EXT;
1794 }
1795 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1796 return result;
1797}
1798
1799VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1800 bool skip_call = false;
1801 {
1802 std::lock_guard<std::mutex> lock(global_lock);
1803 skip_call |=
1804 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1805 }
1806 if (skip_call) {
1807 return VK_ERROR_VALIDATION_FAILED_EXT;
1808 }
1809 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1810 return result;
1811}
1812
1813VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1814 bool skip_call = false;
1815 {
1816 std::lock_guard<std::mutex> lock(global_lock);
1817 skip_call |=
1818 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1819 }
1820 if (skip_call) {
1821 return VK_ERROR_VALIDATION_FAILED_EXT;
1822 }
1823 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1824 return result;
1825}
1826
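// The vkCmd* wrappers that follow do no state tracking of their own; they validate the command buffer and
// any handles passed to the command, and simply return without recording anything if validation fails.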
1827VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1828 VkPipeline pipeline) {
1829 bool skip_call = false;
1830 {
1831 std::lock_guard<std::mutex> lock(global_lock);
1832 skip_call |=
1833 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1834 skip_call |= ValidateNonDispatchableObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1835 }
1836 if (skip_call) {
1837 return;
1838 }
1839 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1840}
1841
1842VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1843 const VkViewport *pViewports) {
1844 bool skip_call = false;
1845 {
1846 std::lock_guard<std::mutex> lock(global_lock);
1847 skip_call |=
1848 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1849 }
1850 if (skip_call) {
1851 return;
1852 }
1853 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1854}
1855
1856VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1857 const VkRect2D *pScissors) {
1858 bool skip_call = false;
1859 {
1860 std::lock_guard<std::mutex> lock(global_lock);
1861 skip_call |=
1862 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1863 }
1864 if (skip_call) {
1865 return;
1866 }
1867 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1868}
1869
1870VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1871 bool skip_call = false;
1872 {
1873 std::lock_guard<std::mutex> lock(global_lock);
1874 skip_call |=
1875 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1876 }
1877 if (skip_call) {
1878 return;
1879 }
1880 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1881}
1882
1883VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1884 float depthBiasSlopeFactor) {
1885 bool skip_call = false;
1886 {
1887 std::lock_guard<std::mutex> lock(global_lock);
1888 skip_call |=
1889 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1890 }
1891 if (skip_call) {
1892 return;
1893 }
1894 get_dispatch_table(ot_device_table_map, commandBuffer)
1895 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1896}
1897
1898VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1899 bool skip_call = false;
1900 {
1901 std::lock_guard<std::mutex> lock(global_lock);
1902 skip_call |=
1903 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1904 }
1905 if (skip_call) {
1906 return;
1907 }
1908 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1909}
1910
1911VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1912 bool skip_call = false;
1913 {
1914 std::lock_guard<std::mutex> lock(global_lock);
1915 skip_call |=
1916 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1917 }
1918 if (skip_call) {
1919 return;
1920 }
1921 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1922}
1923
1924VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1925 uint32_t compareMask) {
1926 bool skip_call = false;
1927 {
1928 std::lock_guard<std::mutex> lock(global_lock);
1929 skip_call |=
1930 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1931 }
1932 if (skip_call) {
1933 return;
1934 }
1935 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1936}
1937
1938VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1939 bool skip_call = false;
1940 {
1941 std::lock_guard<std::mutex> lock(global_lock);
1942 skip_call |=
1943 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1944 }
1945 if (skip_call) {
1946 return;
1947 }
1948 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1949}
1950
1951VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1952 bool skip_call = false;
1953 {
1954 std::lock_guard<std::mutex> lock(global_lock);
1955 skip_call |=
1956 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1957 }
1958 if (skip_call) {
1959 return;
1960 }
1961 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1962}
1963
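// CmdBindDescriptorSets validates the pipeline layout and each descriptor set being bound.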
1964VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1965 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1966 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1967 const uint32_t *pDynamicOffsets) {
1968 bool skip_call = false;
1969 {
1970 std::lock_guard<std::mutex> lock(global_lock);
1971 skip_call |=
1972 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1973 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1974 if (pDescriptorSets) {
1975 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
1976 skip_call |= ValidateNonDispatchableObject(commandBuffer, pDescriptorSets[idx0],
1977 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1978 }
1979 }
1980 }
1981 if (skip_call) {
1982 return;
1983 }
1984 get_dispatch_table(ot_device_table_map, commandBuffer)
1985 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
1986 dynamicOffsetCount, pDynamicOffsets);
1987}
1988
1989VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1990 VkIndexType indexType) {
1991 bool skip_call = false;
1992 {
1993 std::lock_guard<std::mutex> lock(global_lock);
1994 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1995 skip_call |=
1996 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1997 }
1998 if (skip_call) {
1999 return;
2000 }
2001 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
2002}
2003
2004VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
2005 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
2006 bool skip_call = false;
2007 {
2008 std::lock_guard<std::mutex> lock(global_lock);
2009 skip_call |=
2010 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2011 if (pBuffers) {
2012 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
2013 skip_call |=
2014 ValidateNonDispatchableObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2015 }
2016 }
2017 }
2018 if (skip_call) {
2019 return;
2020 }
2021 get_dispatch_table(ot_device_table_map, commandBuffer)
2022 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
2023}
2024
2025VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
2026 uint32_t firstVertex, uint32_t firstInstance) {
2027 bool skip_call = false;
2028 {
2029 std::lock_guard<std::mutex> lock(global_lock);
2030 skip_call |=
2031 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2032 }
2033 if (skip_call) {
2034 return;
2035 }
2036 get_dispatch_table(ot_device_table_map, commandBuffer)
2037 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2038}
2039
2040VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2041 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2042 bool skip_call = false;
2043 {
2044 std::lock_guard<std::mutex> lock(global_lock);
2045 skip_call |=
2046 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2047 }
2048 if (skip_call) {
2049 return;
2050 }
2051 get_dispatch_table(ot_device_table_map, commandBuffer)
2052 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2053}
2054
2055VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2056 uint32_t stride) {
2057 bool skip_call = false;
2058 {
2059 std::lock_guard<std::mutex> lock(global_lock);
2060 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2061 skip_call |=
2062 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2063 }
2064 if (skip_call) {
2065 return;
2066 }
2067 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2068}
2069
2070VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2071 uint32_t drawCount, uint32_t stride) {
2072 bool skip_call = false;
2073 {
2074 std::lock_guard<std::mutex> lock(global_lock);
2075 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2076 skip_call |=
2077 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2078 }
2079 if (skip_call) {
2080 return;
2081 }
2082 get_dispatch_table(ot_device_table_map, commandBuffer)
2083 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2084}
2085
2086VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2087 bool skip_call = false;
2088 {
2089 std::lock_guard<std::mutex> lock(global_lock);
2090 skip_call |=
2091 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2092 }
2093 if (skip_call) {
2094 return;
2095 }
2096 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2097}
2098
2099VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2100 bool skip_call = false;
2101 {
2102 std::lock_guard<std::mutex> lock(global_lock);
2103 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2104 skip_call |=
2105 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2106 }
2107 if (skip_call) {
2108 return;
2109 }
2110 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2111}
2112
2113VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2114 uint32_t regionCount, const VkBufferCopy *pRegions) {
2115 bool skip_call = false;
2116 {
2117 std::lock_guard<std::mutex> lock(global_lock);
2118 skip_call |=
2119 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2120 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2121 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2122 }
2123 if (skip_call) {
2124 return;
2125 }
2126 get_dispatch_table(ot_device_table_map, commandBuffer)
2127 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2128}
2129
2130VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2131 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2132 const VkImageCopy *pRegions) {
2133 bool skip_call = false;
2134 {
2135 std::lock_guard<std::mutex> lock(global_lock);
2136 skip_call |=
2137 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2138 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2139 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2140 }
2141 if (skip_call) {
2142 return;
2143 }
2144 get_dispatch_table(ot_device_table_map, commandBuffer)
2145 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2146}
2147
2148VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2149 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2150 const VkImageBlit *pRegions, VkFilter filter) {
2151 bool skip_call = false;
2152 {
2153 std::lock_guard<std::mutex> lock(global_lock);
2154 skip_call |=
2155 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2156 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2157 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2158 }
2159 if (skip_call) {
2160 return;
2161 }
2162 get_dispatch_table(ot_device_table_map, commandBuffer)
2163 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2164}
2165
2166VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2167 VkImageLayout dstImageLayout, uint32_t regionCount,
2168 const VkBufferImageCopy *pRegions) {
2169 bool skip_call = false;
2170 {
2171 std::lock_guard<std::mutex> lock(global_lock);
2172 skip_call |=
2173 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2174 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2175 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2176 }
2177 if (skip_call) {
2178 return;
2179 }
2180 get_dispatch_table(ot_device_table_map, commandBuffer)
2181 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2182}
2183
2184VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2185 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2186 bool skip_call = false;
2187 {
2188 std::lock_guard<std::mutex> lock(global_lock);
2189 skip_call |=
2190 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2191 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2192 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2193 }
2194 if (skip_call) {
2195 return;
2196 }
2197 get_dispatch_table(ot_device_table_map, commandBuffer)
2198 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2199}
2200
2201VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2202 VkDeviceSize dataSize, const uint32_t *pData) {
2203 bool skip_call = false;
2204 {
2205 std::lock_guard<std::mutex> lock(global_lock);
2206 skip_call |=
2207 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2208 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2209 }
2210 if (skip_call) {
2211 return;
2212 }
2213 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2214}
2215
2216VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2217 VkDeviceSize size, uint32_t data) {
2218 bool skip_call = false;
2219 {
2220 std::lock_guard<std::mutex> lock(global_lock);
2221 skip_call |=
2222 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2223 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2224 }
2225 if (skip_call) {
2226 return;
2227 }
2228 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2229}
2230
2231VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2232 const VkClearColorValue *pColor, uint32_t rangeCount,
2233 const VkImageSubresourceRange *pRanges) {
2234 bool skip_call = false;
2235 {
2236 std::lock_guard<std::mutex> lock(global_lock);
2237 skip_call |=
2238 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2239 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2240 }
2241 if (skip_call) {
2242 return;
2243 }
2244 get_dispatch_table(ot_device_table_map, commandBuffer)
2245 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2246}
2247
2248VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2249 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2250 const VkImageSubresourceRange *pRanges) {
2251 bool skip_call = false;
2252 {
2253 std::lock_guard<std::mutex> lock(global_lock);
2254 skip_call |=
2255 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2256 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2257 }
2258 if (skip_call) {
2259 return;
2260 }
2261 get_dispatch_table(ot_device_table_map, commandBuffer)
2262 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2263}
2264
2265VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2266 const VkClearAttachment *pAttachments, uint32_t rectCount,
2267 const VkClearRect *pRects) {
2268 bool skip_call = false;
2269 {
2270 std::lock_guard<std::mutex> lock(global_lock);
2271 skip_call |=
2272 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2273 }
2274 if (skip_call) {
2275 return;
2276 }
2277 get_dispatch_table(ot_device_table_map, commandBuffer)
2278 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2279}
2280
2281VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2282 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2283 const VkImageResolve *pRegions) {
2284 bool skip_call = false;
2285 {
2286 std::lock_guard<std::mutex> lock(global_lock);
2287 skip_call |=
2288 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2289 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2290 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2291 }
2292 if (skip_call) {
2293 return;
2294 }
2295 get_dispatch_table(ot_device_table_map, commandBuffer)
2296 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2297}
2298
2299VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2300 bool skip_call = false;
2301 {
2302 std::lock_guard<std::mutex> lock(global_lock);
2303 skip_call |=
2304 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2305 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2306 }
2307 if (skip_call) {
2308 return;
2309 }
2310 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2311}
2312
2313VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2314 bool skip_call = false;
2315 {
2316 std::lock_guard<std::mutex> lock(global_lock);
2317 skip_call |=
2318 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2319 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2320 }
2321 if (skip_call) {
2322 return;
2323 }
2324 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2325}
2326
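// CmdWaitEvents and CmdPipelineBarrier also validate handles embedded in the barrier arrays: the buffer in
// each VkBufferMemoryBarrier and the image in each VkImageMemoryBarrier (and, for CmdWaitEvents, every
// event in pEvents).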
2327VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2328 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2329 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2330 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2331 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2332 bool skip_call = false;
2333 {
2334 std::lock_guard<std::mutex> lock(global_lock);
2335 skip_call |=
2336 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2337 if (pBufferMemoryBarriers) {
2338 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2339 if (pBufferMemoryBarriers[idx0].buffer) {
2340 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2341 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2342 }
2343 }
2344 }
2345 if (pEvents) {
2346 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2347 skip_call |=
2348 ValidateNonDispatchableObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2349 }
2350 }
2351 if (pImageMemoryBarriers) {
2352 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2353 if (pImageMemoryBarriers[idx2].image) {
2354 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2355 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2356 }
2357 }
2358 }
2359 }
2360 if (skip_call) {
2361 return;
2362 }
2363 get_dispatch_table(ot_device_table_map, commandBuffer)
2364 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2365 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2366}
2367
2368VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2369 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2370 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2371 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2372 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2373 bool skip_call = false;
2374 {
2375 std::lock_guard<std::mutex> lock(global_lock);
2376 skip_call |=
2377 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2378 if (pBufferMemoryBarriers) {
2379 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2380 if (pBufferMemoryBarriers[idx0].buffer) {
2381 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2382 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2383 }
2384 }
2385 }
2386 if (pImageMemoryBarriers) {
2387 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2388 if (pImageMemoryBarriers[idx1].image) {
2389 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2390 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2391 }
2392 }
2393 }
2394 }
2395 if (skip_call) {
2396 return;
2397 }
2398 get_dispatch_table(ot_device_table_map, commandBuffer)
2399 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2400 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2401}
2402
2403VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2404 VkQueryControlFlags flags) {
2405 bool skip_call = false;
2406 {
2407 std::lock_guard<std::mutex> lock(global_lock);
2408 skip_call |=
2409 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2410 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2411 }
2412 if (skip_call) {
2413 return;
2414 }
2415 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2416}
2417
2418VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2419 bool skip_call = false;
2420 {
2421 std::lock_guard<std::mutex> lock(global_lock);
2422 skip_call |=
2423 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2424 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2425 }
2426 if (skip_call) {
2427 return;
2428 }
2429 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2430}
2431
2432VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2433 uint32_t queryCount) {
2434 bool skip_call = false;
2435 {
2436 std::lock_guard<std::mutex> lock(global_lock);
2437 skip_call |=
2438 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2439 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2440 }
2441 if (skip_call) {
2442 return;
2443 }
2444 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2445}
2446
2447VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2448 VkQueryPool queryPool, uint32_t query) {
2449 bool skip_call = false;
2450 {
2451 std::lock_guard<std::mutex> lock(global_lock);
2452 skip_call |=
2453 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2454 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2455 }
2456 if (skip_call) {
2457 return;
2458 }
2459 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2460}
2461
2462VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2463 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2464 VkDeviceSize stride, VkQueryResultFlags flags) {
2465 bool skip_call = false;
2466 {
2467 std::lock_guard<std::mutex> lock(global_lock);
2468 skip_call |=
2469 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2470 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2471 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2472 }
2473 if (skip_call) {
2474 return;
2475 }
2476 get_dispatch_table(ot_device_table_map, commandBuffer)
2477 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2478}
2479
2480VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2481 uint32_t offset, uint32_t size, const void *pValues) {
2482 bool skip_call = false;
2483 {
2484 std::lock_guard<std::mutex> lock(global_lock);
2485 skip_call |=
2486 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2487 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
2488 }
2489 if (skip_call) {
2490 return;
2491 }
2492 get_dispatch_table(ot_device_table_map, commandBuffer)
2493 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2494}
2495
2496VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2497 VkSubpassContents contents) {
2498 bool skip_call = false;
2499 {
2500 std::lock_guard<std::mutex> lock(global_lock);
2501 skip_call |=
2502 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2503 if (pRenderPassBegin) {
2504 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->framebuffer,
2505 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2506 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->renderPass,
2507 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2508 }
2509 }
2510 if (skip_call) {
2511 return;
2512 }
2513 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2514}
2515
2516VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2517 bool skip_call = false;
2518 {
2519 std::lock_guard<std::mutex> lock(global_lock);
2520 skip_call |=
2521 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2522 }
2523 if (skip_call) {
2524 return;
2525 }
2526 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2527}
2528
2529VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2530 bool skip_call = false;
2531 {
2532 std::lock_guard<std::mutex> lock(global_lock);
2533 skip_call |=
2534 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2535 }
2536 if (skip_call) {
2537 return;
2538 }
2539 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2540}
2541
2542VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2543 const VkCommandBuffer *pCommandBuffers) {
2544 bool skip_call = false;
2545 {
2546 std::lock_guard<std::mutex> lock(global_lock);
2547 skip_call |=
2548 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2549 if (pCommandBuffers) {
2550 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2551 skip_call |= ValidateDispatchableObject(commandBuffer, pCommandBuffers[idx0],
2552 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2553 }
2554 }
2555 }
2556 if (skip_call) {
2557 return;
2558 }
2559 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2560}
2561
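// VK_KHR_surface: validate the instance and surface handles, remove the surface from the tracker,
// then forward the destroy call down the dispatch chain.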
2562VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2563 bool skip_call = false;
2564 {
2565 std::lock_guard<std::mutex> lock(global_lock);
2566 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2567 skip_call |= ValidateNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2568 }
2569 if (skip_call) {
2570 return;
2571 }
2572 {
2573 std::lock_guard<std::mutex> lock(global_lock);
2574 DestroyNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2575 }
2576 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2577}
2578
2579VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2580 VkSurfaceKHR surface, VkBool32 *pSupported) {
2581 bool skip_call = false;
2582 {
2583 std::lock_guard<std::mutex> lock(global_lock);
2584 skip_call |=
2585 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2586 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2587 }
2588 if (skip_call) {
2589 return VK_ERROR_VALIDATION_FAILED_EXT;
2590 }
2591 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2592 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2593 return result;
2594}
2595
2596VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2597 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2598 bool skip_call = false;
2599 {
2600 std::lock_guard<std::mutex> lock(global_lock);
2601 skip_call |=
2602 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2603 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2604 }
2605 if (skip_call) {
2606 return VK_ERROR_VALIDATION_FAILED_EXT;
2607 }
2608 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2609 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2610 return result;
2611}
2612
2613VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2614 uint32_t *pSurfaceFormatCount,
2615 VkSurfaceFormatKHR *pSurfaceFormats) {
2616 bool skip_call = false;
2617 {
2618 std::lock_guard<std::mutex> lock(global_lock);
2619 skip_call |=
2620 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2621 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2622 }
2623 if (skip_call) {
2624 return VK_ERROR_VALIDATION_FAILED_EXT;
2625 }
2626 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2627 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2628 return result;
2629}
2630
2631VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2632 uint32_t *pPresentModeCount,
2633 VkPresentModeKHR *pPresentModes) {
2634 bool skip_call = false;
2635 {
2636 std::lock_guard<std::mutex> lock(global_lock);
2637 skip_call |=
2638 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2639 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2640 }
2641 if (skip_call) {
2642 return VK_ERROR_VALIDATION_FAILED_EXT;
2643 }
2644 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2645 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2646 return result;
2647}
2648
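// Validate the optional oldSwapchain and the required surface in pCreateInfo, then track the new
// VkSwapchainKHR handle when creation succeeds.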
2649VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2650 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2651 bool skip_call = false;
2652 {
2653 std::lock_guard<std::mutex> lock(global_lock);
2654 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2655 if (pCreateInfo) {
2656 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->oldSwapchain,
2657 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2658 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2659 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfo->surface,
2660 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2661 }
2662 }
2663 if (skip_call) {
2664 return VK_ERROR_VALIDATION_FAILED_EXT;
2665 }
2666 VkResult result =
2667 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2668 {
2669 std::lock_guard<std::mutex> lock(global_lock);
2670 if (result == VK_SUCCESS) {
2671 CreateNonDispatchableObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
2672 }
2673 }
2674 return result;
2675}
2676
2677VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2678 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2679 bool skip_call = false;
2680 {
2681 std::lock_guard<std::mutex> lock(global_lock);
2682 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2683 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2684 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2685 skip_call |= ValidateNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2686 }
2687 if (skip_call) {
2688 return VK_ERROR_VALIDATION_FAILED_EXT;
2689 }
2690 VkResult result = get_dispatch_table(ot_device_table_map, device)
2691 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2692 return result;
2693}
2694
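// Validate every swapchain and wait semaphore referenced by pPresentInfo, plus the queue itself,
// before forwarding the present request.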
2695VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2696 bool skip_call = false;
2697 {
2698 std::lock_guard<std::mutex> lock(global_lock);
2699 if (pPresentInfo) {
2700 if (pPresentInfo->pSwapchains) {
2701 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2702 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pSwapchains[idx0],
2703 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2704 }
2705 }
2706 if (pPresentInfo->pWaitSemaphores) {
2707 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2708 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2709 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2710 }
2711 }
2712 }
2713 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2714 }
2715 if (skip_call) {
2716 return VK_ERROR_VALIDATION_FAILED_EXT;
2717 }
2718 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2719 return result;
2720}
2721
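// Platform-specific surface creation and presentation-support queries. Each block is compiled only
// when the corresponding VK_USE_PLATFORM_*_KHR macro is defined, and each created VkSurfaceKHR is
// registered with the tracker.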
2722#ifdef VK_USE_PLATFORM_WIN32_KHR
2723VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2724 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2725 bool skip_call = false;
2726 {
2727 std::lock_guard<std::mutex> lock(global_lock);
2728 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2729 }
2730 if (skip_call) {
2731 return VK_ERROR_VALIDATION_FAILED_EXT;
2732 }
2733 VkResult result =
2734 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2735 {
2736 std::lock_guard<std::mutex> lock(global_lock);
2737 if (result == VK_SUCCESS) {
2738 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2739 }
2740 }
2741 return result;
2742}
2743
2744VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2745 uint32_t queueFamilyIndex) {
2746 bool skip_call = false;
2747 {
2748 std::lock_guard<std::mutex> lock(global_lock);
2749 skip_call |=
2750 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2751 }
2752 if (skip_call) {
2753 return VK_FALSE;
2754 }
2755 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2756 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2757 return result;
2758}
2759#endif // VK_USE_PLATFORM_WIN32_KHR
2760
2761#ifdef VK_USE_PLATFORM_XCB_KHR
2762VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2763 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2764 bool skip_call = false;
2765 {
2766 std::lock_guard<std::mutex> lock(global_lock);
2767 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2768 }
2769 if (skip_call) {
2770 return VK_ERROR_VALIDATION_FAILED_EXT;
2771 }
2772 VkResult result =
2773 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2774 {
2775 std::lock_guard<std::mutex> lock(global_lock);
2776 if (result == VK_SUCCESS) {
2777 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2778 }
2779 }
2780 return result;
2781}
2782
2783VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2784 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2785 xcb_visualid_t visual_id) {
2786 bool skip_call = false;
2787 {
2788 std::lock_guard<std::mutex> lock(global_lock);
2789 skip_call |=
2790 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2791 }
2792 if (skip_call) {
2793 return VK_FALSE;
2794 }
2795 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2796 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2797 return result;
2798}
2799#endif // VK_USE_PLATFORM_XCB_KHR
2800
2801#ifdef VK_USE_PLATFORM_XLIB_KHR
2802VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2803 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2804 bool skip_call = false;
2805 {
2806 std::lock_guard<std::mutex> lock(global_lock);
2807 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2808 }
2809 if (skip_call) {
2810 return VK_ERROR_VALIDATION_FAILED_EXT;
2811 }
2812 VkResult result =
2813 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2814 {
2815 std::lock_guard<std::mutex> lock(global_lock);
2816 if (result == VK_SUCCESS) {
2817 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2818 }
2819 }
2820 return result;
2821}
2822
2823VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2824 uint32_t queueFamilyIndex, Display *dpy,
2825 VisualID visualID) {
2826 bool skip_call = false;
2827 {
2828 std::lock_guard<std::mutex> lock(global_lock);
2829 skip_call |=
2830 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2831 }
2832 if (skip_call) {
2833 return VK_FALSE;
2834 }
2835 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2836 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2837 return result;
2838}
2839#endif // VK_USE_PLATFORM_XLIB_KHR
2840
2841#ifdef VK_USE_PLATFORM_MIR_KHR
2842VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2843 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2844 bool skip_call = false;
2845 {
2846 std::lock_guard<std::mutex> lock(global_lock);
2847 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2848 }
2849 if (skip_call) {
2850 return VK_ERROR_VALIDATION_FAILED_EXT;
2851 }
2852 VkResult result =
2853 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2854 {
2855 std::lock_guard<std::mutex> lock(global_lock);
2856 if (result == VK_SUCCESS) {
2857 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2858 }
2859 }
2860 return result;
2861}
2862
2863VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2864 uint32_t queueFamilyIndex, MirConnection *connection) {
2865 bool skip_call = false;
2866 {
2867 std::lock_guard<std::mutex> lock(global_lock);
2868 skip_call |=
2869 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2870 }
2871 if (skip_call) {
2872 return VK_FALSE;
2873 }
2874 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2875 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2876 return result;
2877}
2878#endif // VK_USE_PLATFORM_MIR_KHR
2879
2880#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2881VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2882 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2883 bool skip_call = false;
2884 {
2885 std::lock_guard<std::mutex> lock(global_lock);
2886 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2887 }
2888 if (skip_call) {
2889 return VK_ERROR_VALIDATION_FAILED_EXT;
2890 }
2891 VkResult result =
2892 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2893 {
2894 std::lock_guard<std::mutex> lock(global_lock);
2895 if (result == VK_SUCCESS) {
2896 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2897 }
2898 }
2899 return result;
2900}
2901
2902VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2903 uint32_t queueFamilyIndex,
2904 struct wl_display *display) {
2905 bool skip_call = false;
2906 {
2907 std::lock_guard<std::mutex> lock(global_lock);
2908 skip_call |=
2909 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2910 }
2911 if (skip_call) {
2912 return VK_FALSE;
2913 }
2914 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2915 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2916 return result;
2917}
2918#endif // VK_USE_PLATFORM_WAYLAND_KHR
2919
2920#ifdef VK_USE_PLATFORM_ANDROID_KHR
2921VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2922 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2923 bool skip_call = false;
2924 {
2925 std::lock_guard<std::mutex> lock(global_lock);
2926 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2927 }
2928 if (skip_call) {
2929 return VK_ERROR_VALIDATION_FAILED_EXT;
2930 }
2931 VkResult result =
2932 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2933 {
2934 std::lock_guard<std::mutex> lock(global_lock);
2935 if (result == VK_SUCCESS) {
2936 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2937 }
2938 }
2939 return result;
2940}
2941#endif // VK_USE_PLATFORM_ANDROID_KHR
2942
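// VK_KHR_display_swapchain: validate each entry in pCreateInfos, then track every swapchain
// returned on success.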
2943VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
2944 const VkSwapchainCreateInfoKHR *pCreateInfos,
2945 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
2946 bool skip_call = false;
2947 uint32_t i = 0;
2948 {
2949 std::lock_guard<std::mutex> lock(global_lock);
2950 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2951 if (NULL != pCreateInfos) {
2952 for (i = 0; i < swapchainCount; i++) {
2953 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[i].oldSwapchain,
2954 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2955 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2956 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfos[i].surface,
2957 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2958 }
2959 }
2960 }
2961 if (skip_call) {
2962 return VK_ERROR_VALIDATION_FAILED_EXT;
2963 }
2964 VkResult result =
2965 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
2966 {
2967 std::lock_guard<std::mutex> lock(global_lock);
2968 if (result == VK_SUCCESS) {
2969 for (i = 0; i < swapchainCount; i++) {
2970 CreateNonDispatchableObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
2971 }
2972 }
2973 }
2974 return result;
2975}
2976
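// Debug report entry points: create and destroy the layer's message callback alongside the
// driver's, and track the VkDebugReportCallbackEXT handle.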
2977VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2978 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2979 const VkAllocationCallbacks *pAllocator,
2980 VkDebugReportCallbackEXT *pCallback) {
2981 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2982 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
2983 if (VK_SUCCESS == result) {
2984 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2985 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
2986 CreateNonDispatchableObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
2987 }
2988 return result;
2989}
2990
2991VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
2992 const VkAllocationCallbacks *pAllocator) {
2993 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2994 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
2995 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2996 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
2997 DestroyNonDispatchableObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
2998}
2999
3000VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
3001 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
3002 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
3003 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
3004 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
3005}
3006
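// Layer self-description used by the enumeration entry points below.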
3007static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
3008
3009static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
3010 VK_LAYER_API_VERSION, // specVersion
3011 1, // implementationVersion
3012 "LunarG Validation Layer"};
3013
3014VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
3015 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
3016}
3017
3018VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
3019 VkLayerProperties *pProperties) {
3020 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
3021}
3022
3023VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
3024 VkExtensionProperties *pProperties) {
3025 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3026 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
3027
3028 return VK_ERROR_LAYER_NOT_PRESENT;
3029}
3030
3031VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
3032 uint32_t *pCount, VkExtensionProperties *pProperties) {
3033 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3034 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
3035
3036 assert(physicalDevice);
3037 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3038 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3039}
3040
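// GetInstanceProcAddr helpers: hand out debug-report and WSI entry points only when the
// corresponding instance extensions were enabled at vkCreateInstance time.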
3041static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3042 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3043 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3044}
3045
3046static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
3047 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
3048 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
3049 return nullptr;
3050
3051 if (!strcmp("vkDestroySurfaceKHR", name))
3052 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
3053 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
3054 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
3055 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
3056 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
3057 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
3058 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
3059 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
3060 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);
3061
3062#ifdef VK_USE_PLATFORM_WIN32_KHR
3063 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
3064 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
3065 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
3066 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
3067#endif // VK_USE_PLATFORM_WIN32_KHR
3068#ifdef VK_USE_PLATFORM_XCB_KHR
3069    if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
3070        return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
3071    if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
3072        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
3073#endif // VK_USE_PLATFORM_XCB_KHR
3074#ifdef VK_USE_PLATFORM_XLIB_KHR
3075    if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
3076        return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
3077    if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
3078        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
3079#endif // VK_USE_PLATFORM_XLIB_KHR
3080#ifdef VK_USE_PLATFORM_MIR_KHR
3081    if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
3082        return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
3083    if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
3084        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
3085#endif // VK_USE_PLATFORM_MIR_KHR
3086#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3087    if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
3088        return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
3089    if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
3090        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
3091#endif // VK_USE_PLATFORM_WAYLAND_KHR
3092#ifdef VK_USE_PLATFORM_ANDROID_KHR
3093    if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
3094        return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
3095#endif // VK_USE_PLATFORM_ANDROID_KHR
3096
3097 return nullptr;
3098}
3099
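// Record which device extensions (swapchain, display swapchain, OBJTRACK_EXTENSIONS) were enabled
// so later intercepts can be gated on them.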
3100static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3101 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3102 device_data->wsi_enabled = false;
3103    device_data->wsi_display_swapchain_enabled = false;
3104 device_data->objtrack_extensions_enabled = false;
3105
3106 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3107 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3108 device_data->wsi_enabled = true;
3109 }
3110        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) {
3111 device_data->wsi_display_swapchain_enabled = true;
3112 }
3113        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3114 device_data->objtrack_extensions_enabled = true;
3115 }
3116 }
3117}
3118
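// Record which instance-level WSI surface extensions were enabled for this instance's dispatch table.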
3119static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3120 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3121
3122
3123 instanceExtMap[pDisp] = {};
3124
3125 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3126 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3127 instanceExtMap[pDisp].wsi_enabled = true;
3128 }
3129#ifdef VK_USE_PLATFORM_XLIB_KHR
3130 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3131 instanceExtMap[pDisp].xlib_enabled = true;
3132 }
3133#endif
3134#ifdef VK_USE_PLATFORM_XCB_KHR
3135 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3136 instanceExtMap[pDisp].xcb_enabled = true;
3137 }
3138#endif
3139#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3140 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3141 instanceExtMap[pDisp].wayland_enabled = true;
3142 }
3143#endif
3144#ifdef VK_USE_PLATFORM_MIR_KHR
3145 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3146 instanceExtMap[pDisp].mir_enabled = true;
3147 }
3148#endif
3149#ifdef VK_USE_PLATFORM_ANDROID_KHR
3150 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3151 instanceExtMap[pDisp].android_enabled = true;
3152 }
3153#endif
3154#ifdef VK_USE_PLATFORM_WIN32_KHR
3155 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3156 instanceExtMap[pDisp].win32_enabled = true;
3157 }
3158#endif
3159 }
3160}
3161
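// Standard layer CreateDevice: fetch the next layer's vkCreateDevice from the chain info, advance
// the chain, create the device, then set up this layer's dispatch table and device tracking.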
3162VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
3163 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
3164 std::lock_guard<std::mutex> lock(global_lock);
3165 layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3166 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3167
3168 assert(chain_info->u.pLayerInfo);
3169 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3170 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
3171 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
3172 if (fpCreateDevice == NULL) {
3173 return VK_ERROR_INITIALIZATION_FAILED;
3174 }
3175
3176 // Advance the link info for the next element on the chain
3177 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3178
3179 VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
3180 if (result != VK_SUCCESS) {
3181 return result;
3182 }
3183
3184 layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
3185 device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);
3186
3187 // Add link back to physDev
3188 device_data->physical_device = physicalDevice;
3189
3190 initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);
3191
3192 CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
3193 CreateDispatchableObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT);
3194
3195 return result;
3196}
3197
3198VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3199 uint32_t *pQueueFamilyPropertyCount,
3200 VkQueueFamilyProperties *pQueueFamilyProperties) {
3201 get_dispatch_table(ot_instance_table_map, physicalDevice)
3202 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3203 std::lock_guard<std::mutex> lock(global_lock);
3204 if (pQueueFamilyProperties != NULL) {
3205 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3206 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3207 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3208 }
3209 }
3210}
3211
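// Standard layer CreateInstance: call down the chain, then initialize the instance dispatch table,
// debug reporting, and object tracking for the new instance.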
3212VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
3213 VkInstance *pInstance) {
3214 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3215
3216 assert(chain_info->u.pLayerInfo);
3217 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3218 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
3219 if (fpCreateInstance == NULL) {
3220 return VK_ERROR_INITIALIZATION_FAILED;
3221 }
3222
3223 // Advance the link info for the next element on the chain
3224 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3225
3226 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
3227 if (result != VK_SUCCESS) {
3228 return result;
3229 }
3230
3231 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
3232 instance_data->instance = *pInstance;
3233 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
3234 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);
3235
3236 // Look for one or more debug report create info structures, and copy the
3237 // callback(s) for each one found (for use by vkDestroyInstance)
3238 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
3239 &instance_data->tmp_callbacks);
3240
3241 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
3242 pCreateInfo->ppEnabledExtensionNames);
3243
3244 InitObjectTracker(instance_data, pAllocator);
3245 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);
3246
3247 CreateDispatchableObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);
3248
3249 return result;
3250}
3251
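// Track each VkPhysicalDevice handle returned by the driver so later calls can validate it.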
3252VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3253 VkPhysicalDevice *pPhysicalDevices) {
3254    bool skip_call = false;
3255 std::unique_lock<std::mutex> lock(global_lock);
3256 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3257 lock.unlock();
3258 if (skip_call) {
3259 return VK_ERROR_VALIDATION_FAILED_EXT;
3260 }
3261 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3262 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3263 lock.lock();
3264 if (result == VK_SUCCESS) {
3265 if (pPhysicalDevices) {
3266 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3267 CreateDispatchableObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT);
3268 }
3269 }
3270 }
3271 lock.unlock();
3272 return result;
3273}
3274
3275VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3276 std::unique_lock<std::mutex> lock(global_lock);
3277 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3278 lock.unlock();
3279
3280 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3281
3282 lock.lock();
3283
3284 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3285 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3286}
3287
3288VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3289 std::unique_lock<std::mutex> lock(global_lock);
3290 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3291 lock.unlock();
3292
3293 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3294
3295 lock.lock();
3296 DestroyNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
3297}
3298
3299VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3300 VkMemoryMapFlags flags, void **ppData) {
3301    bool skip_call = false;
3302 std::unique_lock<std::mutex> lock(global_lock);
3303 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3304 lock.unlock();
3305    if (skip_call) {
3306 return VK_ERROR_VALIDATION_FAILED_EXT;
3307 }
3308 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3309 return result;
3310}
3311
3312VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3313    bool skip_call = false;
3314 std::unique_lock<std::mutex> lock(global_lock);
3315 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3316 lock.unlock();
3317    if (skip_call) {
3318 return;
3319 }
3320
3321 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3322}
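
// Check the queue's flags via ValidateQueueFlags and validate every buffer and image referenced by
// pBindInfo before forwarding the sparse bind.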
3323VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3324 VkFence fence) {
3325 std::unique_lock<std::mutex> lock(global_lock);
3326 ValidateQueueFlags(queue, "QueueBindSparse");
3327
3328 for (uint32_t i = 0; i < bindInfoCount; i++) {
3329 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
3330 ValidateNonDispatchableObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3331 false);
3332 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
3333 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3334 false);
3335 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
3336 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
3337 }
3338 lock.unlock();
3339
3340 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3341 return result;
3342}
3343
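// Validate the command pool, then track each allocated command buffer along with its pool and level.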
3344VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3345 VkCommandBuffer *pCommandBuffers) {
3346    bool skip_call = false;
3347 std::unique_lock<std::mutex> lock(global_lock);
3348 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3349 skip_call |=
3350 ValidateNonDispatchableObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3351 lock.unlock();
3352
3353 if (skip_call) {
3354 return VK_ERROR_VALIDATION_FAILED_EXT;
3355 }
3356
3357 VkResult result =
3358 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3359
3360 lock.lock();
3361 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3362 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3363 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3364 }
3365 lock.unlock();
3366
3367 return result;
3368}
3369
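// Validate the descriptor pool and every set layout, then track each allocated descriptor set.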
3370VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3371 VkDescriptorSet *pDescriptorSets) {
3372    bool skip_call = false;
3373 std::unique_lock<std::mutex> lock(global_lock);
3374 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3375 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->descriptorPool,
3376 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3377 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3378 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->pSetLayouts[i],
3379 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3380 }
3381 lock.unlock();
3382 if (skip_call) {
3383 return VK_ERROR_VALIDATION_FAILED_EXT;
3384 }
3385
3386 VkResult result =
3387 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3388
3389 if (VK_SUCCESS == result) {
3390 lock.lock();
3391 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3392 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3393 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3394 }
3395 lock.unlock();
3396 }
3397
3398 return result;
3399}
3400
3401VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3402 const VkCommandBuffer *pCommandBuffers) {
3403 bool skip_call = false;
3404 std::unique_lock<std::mutex> lock(global_lock);
3405    skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3406    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3407 for (uint32_t i = 0; i < commandBufferCount; i++) {
3408 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3409 }
3410
3411    for (uint32_t i = 0; i < commandBufferCount; i++) {
3412 DestroyDispatchableObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
3413 }
3414
3415    lock.unlock();
3416 if (!skip_call) {
3417 get_dispatch_table(ot_device_table_map, device)
3418 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3419 }
3420}
3421VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3422 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3423 std::unique_lock<std::mutex> lock(global_lock);
3424 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3425 // Remove this swapchain's images from our map of such images.
3426 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3427 while (itr != device_data->swapchainImageMap.end()) {
3428 OBJTRACK_NODE *pNode = (*itr).second;
3429 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3430 delete pNode;
3431 auto delete_item = itr++;
3432 device_data->swapchainImageMap.erase(delete_item);
3433 } else {
3434 ++itr;
3435 }
3436 }
3437 DestroyNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
3438 lock.unlock();
3439
3440 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3441}
3442
3443VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3444 const VkDescriptorSet *pDescriptorSets) {
3445 bool skip_call = false;
3446 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3447 std::unique_lock<std::mutex> lock(global_lock);
3448 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3449 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3450 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3451 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3452 }
3453
3454    for (uint32_t i = 0; i < descriptorSetCount; i++) {
3455 DestroyNonDispatchableObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3456 }
3457
3458    lock.unlock();
3459 if (!skip_call) {
3460 result = get_dispatch_table(ot_device_table_map, device)
3461 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3462 }
3463    return result;
3464}
3465
3466VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3467 const VkAllocationCallbacks *pAllocator) {
3468    bool skip_call = false;
3469 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3470 std::unique_lock<std::mutex> lock(global_lock);
3471 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3472 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3473 lock.unlock();
3474 if (skip_call) {
3475 return;
3476 }
3477 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
3478 // Remove this pool's descriptor sets from our descriptorSet map.
3479 lock.lock();
3480 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
3481 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
3482 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
3483 OBJTRACK_NODE *pNode = (*itr).second;
3484 auto del_itr = itr++;
3485 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
3486 DestroyNonDispatchableObject(device, (VkDescriptorSet)((*del_itr).first),
3487 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3488 }
3489 }
3490 DestroyNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
3491 lock.unlock();
3492 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
3493}
3494
3495VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
3496 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3497 bool skip_call = false;
3498 std::unique_lock<std::mutex> lock(global_lock);
3499 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3500 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3501 lock.unlock();
3502 if (skip_call) {
3503 return;
3504 }
3505 lock.lock();
3506 // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
3507 // Remove this pool's cmdBuffers from our cmd buffer map.
3508 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
3509 auto del_itr = itr;
3510 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
3511 OBJTRACK_NODE *pNode = (*itr).second;
3512 del_itr = itr++;
3513 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
3514 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
3515 DestroyDispatchableObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
3516 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
3517 }
3518 }
3519 DestroyNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
3520 lock.unlock();
3521 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
3522}
3523
3524VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
3525 VkImage *pSwapchainImages) {
3526    bool skip_call = false;
3527 std::unique_lock<std::mutex> lock(global_lock);
3528 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3529 lock.unlock();
3530 if (skip_call) {
3531 return VK_ERROR_VALIDATION_FAILED_EXT;
3532 }
3533 VkResult result = get_dispatch_table(ot_device_table_map, device)
3534 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
3535 if (pSwapchainImages != NULL) {
3536 lock.lock();
3537 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
3538 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
3539 }
3540 lock.unlock();
3541 }
3542 return result;
3543}
3544
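// Validate every handle referenced by the graphics pipeline create infos (base pipeline, layout,
// shader modules, render pass) plus the optional pipeline cache, then track the new pipelines.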
3545VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3546 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3547 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3548    bool skip_call = false;
3549 std::unique_lock<std::mutex> lock(global_lock);
3550 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3551 if (pCreateInfos) {
3552 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3553 if (pCreateInfos[idx0].basePipelineHandle) {
3554 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3555 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3556 }
3557 if (pCreateInfos[idx0].layout) {
3558 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3559 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3560 }
3561 if (pCreateInfos[idx0].pStages) {
3562 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
3563 if (pCreateInfos[idx0].pStages[idx1].module) {
3564 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].pStages[idx1].module,
3565 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3566 }
3567 }
3568 }
3569 if (pCreateInfos[idx0].renderPass) {
3570 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].renderPass,
3571 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
3572 }
3573 }
3574 }
3575 if (pipelineCache) {
3576 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3577 }
3578 lock.unlock();
3579 if (skip_call) {
3580 return VK_ERROR_VALIDATION_FAILED_EXT;
3581 }
3582 VkResult result = get_dispatch_table(ot_device_table_map, device)
3583 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3584 lock.lock();
3585 if (result == VK_SUCCESS) {
3586 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
3587 CreateNonDispatchableObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
3588 }
3589 }
3590 lock.unlock();
3591 return result;
3592}
3593
3594VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3595 const VkComputePipelineCreateInfo *pCreateInfos,
3596 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3597    bool skip_call = false;
3598 std::unique_lock<std::mutex> lock(global_lock);
3599 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3600 if (pCreateInfos) {
3601 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3602 if (pCreateInfos[idx0].basePipelineHandle) {
3603 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3604 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3605 }
3606 if (pCreateInfos[idx0].layout) {
3607 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3608 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3609 }
3610 if (pCreateInfos[idx0].stage.module) {
3611 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].stage.module,
3612 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3613 }
3614 }
3615 }
3616 if (pipelineCache) {
3617 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3618 }
3619 lock.unlock();
3620 if (skip_call) {
3621 return VK_ERROR_VALIDATION_FAILED_EXT;
3622 }
3623 VkResult result = get_dispatch_table(ot_device_table_map, device)
3624 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3625 lock.lock();
3626 if (result == VK_SUCCESS) {
3627 for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
3628 CreateNonDispatchableObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
3629 }
3630 }
3631 lock.unlock();
3632 return result;
3633}
3634
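// Map a core device-level "vkXxx" command name to this layer's interception routine, or NULL if not intercepted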
3635static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
3636 if (!name || name[0] != 'v' || name[1] != 'k')
3637 return NULL;
3638
3639 name += 2;
3640 if (!strcmp(name, "GetDeviceProcAddr"))
3641 return (PFN_vkVoidFunction)GetDeviceProcAddr;
3642 if (!strcmp(name, "DestroyDevice"))
3643 return (PFN_vkVoidFunction)DestroyDevice;
3644 if (!strcmp(name, "GetDeviceQueue"))
3645 return (PFN_vkVoidFunction)GetDeviceQueue;
3646 if (!strcmp(name, "QueueSubmit"))
3647 return (PFN_vkVoidFunction)QueueSubmit;
3648 if (!strcmp(name, "QueueWaitIdle"))
3649 return (PFN_vkVoidFunction)QueueWaitIdle;
3650 if (!strcmp(name, "DeviceWaitIdle"))
3651 return (PFN_vkVoidFunction)DeviceWaitIdle;
3652 if (!strcmp(name, "AllocateMemory"))
3653 return (PFN_vkVoidFunction)AllocateMemory;
3654 if (!strcmp(name, "FreeMemory"))
3655 return (PFN_vkVoidFunction)FreeMemory;
3656 if (!strcmp(name, "MapMemory"))
3657 return (PFN_vkVoidFunction)MapMemory;
3658 if (!strcmp(name, "UnmapMemory"))
3659 return (PFN_vkVoidFunction)UnmapMemory;
3660 if (!strcmp(name, "FlushMappedMemoryRanges"))
3661 return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
3662 if (!strcmp(name, "InvalidateMappedMemoryRanges"))
3663 return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
3664 if (!strcmp(name, "GetDeviceMemoryCommitment"))
3665 return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
3666 if (!strcmp(name, "BindBufferMemory"))
3667 return (PFN_vkVoidFunction)BindBufferMemory;
3668 if (!strcmp(name, "BindImageMemory"))
3669 return (PFN_vkVoidFunction)BindImageMemory;
3670 if (!strcmp(name, "GetBufferMemoryRequirements"))
3671 return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
3672 if (!strcmp(name, "GetImageMemoryRequirements"))
3673 return (PFN_vkVoidFunction)GetImageMemoryRequirements;
3674 if (!strcmp(name, "GetImageSparseMemoryRequirements"))
3675 return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
3676 if (!strcmp(name, "QueueBindSparse"))
3677 return (PFN_vkVoidFunction)QueueBindSparse;
3678 if (!strcmp(name, "CreateFence"))
3679 return (PFN_vkVoidFunction)CreateFence;
3680 if (!strcmp(name, "DestroyFence"))
3681 return (PFN_vkVoidFunction)DestroyFence;
3682 if (!strcmp(name, "ResetFences"))
3683 return (PFN_vkVoidFunction)ResetFences;
3684 if (!strcmp(name, "GetFenceStatus"))
3685 return (PFN_vkVoidFunction)GetFenceStatus;
3686 if (!strcmp(name, "WaitForFences"))
3687 return (PFN_vkVoidFunction)WaitForFences;
3688 if (!strcmp(name, "CreateSemaphore"))
3689 return (PFN_vkVoidFunction)CreateSemaphore;
3690 if (!strcmp(name, "DestroySemaphore"))
3691 return (PFN_vkVoidFunction)DestroySemaphore;
3692 if (!strcmp(name, "CreateEvent"))
3693 return (PFN_vkVoidFunction)CreateEvent;
3694 if (!strcmp(name, "DestroyEvent"))
3695 return (PFN_vkVoidFunction)DestroyEvent;
3696 if (!strcmp(name, "GetEventStatus"))
3697 return (PFN_vkVoidFunction)GetEventStatus;
3698 if (!strcmp(name, "SetEvent"))
3699 return (PFN_vkVoidFunction)SetEvent;
3700 if (!strcmp(name, "ResetEvent"))
3701 return (PFN_vkVoidFunction)ResetEvent;
3702 if (!strcmp(name, "CreateQueryPool"))
3703 return (PFN_vkVoidFunction)CreateQueryPool;
3704 if (!strcmp(name, "DestroyQueryPool"))
3705 return (PFN_vkVoidFunction)DestroyQueryPool;
3706 if (!strcmp(name, "GetQueryPoolResults"))
3707 return (PFN_vkVoidFunction)GetQueryPoolResults;
3708 if (!strcmp(name, "CreateBuffer"))
3709 return (PFN_vkVoidFunction)CreateBuffer;
3710 if (!strcmp(name, "DestroyBuffer"))
3711 return (PFN_vkVoidFunction)DestroyBuffer;
3712 if (!strcmp(name, "CreateBufferView"))
3713 return (PFN_vkVoidFunction)CreateBufferView;
3714 if (!strcmp(name, "DestroyBufferView"))
3715 return (PFN_vkVoidFunction)DestroyBufferView;
3716 if (!strcmp(name, "CreateImage"))
3717 return (PFN_vkVoidFunction)CreateImage;
3718 if (!strcmp(name, "DestroyImage"))
3719 return (PFN_vkVoidFunction)DestroyImage;
3720 if (!strcmp(name, "GetImageSubresourceLayout"))
3721 return (PFN_vkVoidFunction)GetImageSubresourceLayout;
3722 if (!strcmp(name, "CreateImageView"))
3723 return (PFN_vkVoidFunction)CreateImageView;
3724 if (!strcmp(name, "DestroyImageView"))
3725 return (PFN_vkVoidFunction)DestroyImageView;
3726 if (!strcmp(name, "CreateShaderModule"))
3727 return (PFN_vkVoidFunction)CreateShaderModule;
3728 if (!strcmp(name, "DestroyShaderModule"))
3729 return (PFN_vkVoidFunction)DestroyShaderModule;
3730 if (!strcmp(name, "CreatePipelineCache"))
3731 return (PFN_vkVoidFunction)CreatePipelineCache;
3732 if (!strcmp(name, "DestroyPipelineCache"))
3733 return (PFN_vkVoidFunction)DestroyPipelineCache;
3734 if (!strcmp(name, "GetPipelineCacheData"))
3735 return (PFN_vkVoidFunction)GetPipelineCacheData;
3736 if (!strcmp(name, "MergePipelineCaches"))
3737 return (PFN_vkVoidFunction)MergePipelineCaches;
3738 if (!strcmp(name, "CreateGraphicsPipelines"))
3739 return (PFN_vkVoidFunction)CreateGraphicsPipelines;
3740 if (!strcmp(name, "CreateComputePipelines"))
3741 return (PFN_vkVoidFunction)CreateComputePipelines;
3742 if (!strcmp(name, "DestroyPipeline"))
3743 return (PFN_vkVoidFunction)DestroyPipeline;
3744 if (!strcmp(name, "CreatePipelineLayout"))
3745 return (PFN_vkVoidFunction)CreatePipelineLayout;
3746 if (!strcmp(name, "DestroyPipelineLayout"))
3747 return (PFN_vkVoidFunction)DestroyPipelineLayout;
3748 if (!strcmp(name, "CreateSampler"))
3749 return (PFN_vkVoidFunction)CreateSampler;
3750 if (!strcmp(name, "DestroySampler"))
3751 return (PFN_vkVoidFunction)DestroySampler;
3752 if (!strcmp(name, "CreateDescriptorSetLayout"))
3753 return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
3754 if (!strcmp(name, "DestroyDescriptorSetLayout"))
3755 return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
3756 if (!strcmp(name, "CreateDescriptorPool"))
3757 return (PFN_vkVoidFunction)CreateDescriptorPool;
3758 if (!strcmp(name, "DestroyDescriptorPool"))
3759 return (PFN_vkVoidFunction)DestroyDescriptorPool;
3760 if (!strcmp(name, "ResetDescriptorPool"))
3761 return (PFN_vkVoidFunction)ResetDescriptorPool;
3762 if (!strcmp(name, "AllocateDescriptorSets"))
3763 return (PFN_vkVoidFunction)AllocateDescriptorSets;
3764 if (!strcmp(name, "FreeDescriptorSets"))
3765 return (PFN_vkVoidFunction)FreeDescriptorSets;
3766 if (!strcmp(name, "UpdateDescriptorSets"))
3767 return (PFN_vkVoidFunction)UpdateDescriptorSets;
3768 if (!strcmp(name, "CreateFramebuffer"))
3769 return (PFN_vkVoidFunction)CreateFramebuffer;
3770 if (!strcmp(name, "DestroyFramebuffer"))
3771 return (PFN_vkVoidFunction)DestroyFramebuffer;
3772 if (!strcmp(name, "CreateRenderPass"))
3773 return (PFN_vkVoidFunction)CreateRenderPass;
3774 if (!strcmp(name, "DestroyRenderPass"))
3775 return (PFN_vkVoidFunction)DestroyRenderPass;
3776 if (!strcmp(name, "GetRenderAreaGranularity"))
3777 return (PFN_vkVoidFunction)GetRenderAreaGranularity;
3778 if (!strcmp(name, "CreateCommandPool"))
3779 return (PFN_vkVoidFunction)CreateCommandPool;
3780 if (!strcmp(name, "DestroyCommandPool"))
3781 return (PFN_vkVoidFunction)DestroyCommandPool;
3782 if (!strcmp(name, "ResetCommandPool"))
3783 return (PFN_vkVoidFunction)ResetCommandPool;
3784 if (!strcmp(name, "AllocateCommandBuffers"))
3785 return (PFN_vkVoidFunction)AllocateCommandBuffers;
3786 if (!strcmp(name, "FreeCommandBuffers"))
3787 return (PFN_vkVoidFunction)FreeCommandBuffers;
3788 if (!strcmp(name, "BeginCommandBuffer"))
3789 return (PFN_vkVoidFunction)BeginCommandBuffer;
3790 if (!strcmp(name, "EndCommandBuffer"))
3791 return (PFN_vkVoidFunction)EndCommandBuffer;
3792 if (!strcmp(name, "ResetCommandBuffer"))
3793 return (PFN_vkVoidFunction)ResetCommandBuffer;
3794 if (!strcmp(name, "CmdBindPipeline"))
3795 return (PFN_vkVoidFunction)CmdBindPipeline;
3796 if (!strcmp(name, "CmdSetViewport"))
3797 return (PFN_vkVoidFunction)CmdSetViewport;
3798 if (!strcmp(name, "CmdSetScissor"))
3799 return (PFN_vkVoidFunction)CmdSetScissor;
3800 if (!strcmp(name, "CmdSetLineWidth"))
3801 return (PFN_vkVoidFunction)CmdSetLineWidth;
3802 if (!strcmp(name, "CmdSetDepthBias"))
3803 return (PFN_vkVoidFunction)CmdSetDepthBias;
3804 if (!strcmp(name, "CmdSetBlendConstants"))
3805 return (PFN_vkVoidFunction)CmdSetBlendConstants;
3806 if (!strcmp(name, "CmdSetDepthBounds"))
3807 return (PFN_vkVoidFunction)CmdSetDepthBounds;
3808 if (!strcmp(name, "CmdSetStencilCompareMask"))
3809 return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
3810 if (!strcmp(name, "CmdSetStencilWriteMask"))
3811 return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
3812 if (!strcmp(name, "CmdSetStencilReference"))
3813 return (PFN_vkVoidFunction)CmdSetStencilReference;
3814 if (!strcmp(name, "CmdBindDescriptorSets"))
3815 return (PFN_vkVoidFunction)CmdBindDescriptorSets;
3816 if (!strcmp(name, "CmdBindIndexBuffer"))
3817 return (PFN_vkVoidFunction)CmdBindIndexBuffer;
3818 if (!strcmp(name, "CmdBindVertexBuffers"))
3819 return (PFN_vkVoidFunction)CmdBindVertexBuffers;
3820 if (!strcmp(name, "CmdDraw"))
3821 return (PFN_vkVoidFunction)CmdDraw;
3822 if (!strcmp(name, "CmdDrawIndexed"))
3823 return (PFN_vkVoidFunction)CmdDrawIndexed;
3824 if (!strcmp(name, "CmdDrawIndirect"))
3825 return (PFN_vkVoidFunction)CmdDrawIndirect;
3826 if (!strcmp(name, "CmdDrawIndexedIndirect"))
3827 return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
3828 if (!strcmp(name, "CmdDispatch"))
3829 return (PFN_vkVoidFunction)CmdDispatch;
3830 if (!strcmp(name, "CmdDispatchIndirect"))
3831 return (PFN_vkVoidFunction)CmdDispatchIndirect;
3832 if (!strcmp(name, "CmdCopyBuffer"))
3833 return (PFN_vkVoidFunction)CmdCopyBuffer;
3834 if (!strcmp(name, "CmdCopyImage"))
3835 return (PFN_vkVoidFunction)CmdCopyImage;
3836 if (!strcmp(name, "CmdBlitImage"))
3837 return (PFN_vkVoidFunction)CmdBlitImage;
3838 if (!strcmp(name, "CmdCopyBufferToImage"))
3839 return (PFN_vkVoidFunction)CmdCopyBufferToImage;
3840 if (!strcmp(name, "CmdCopyImageToBuffer"))
3841 return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
3842 if (!strcmp(name, "CmdUpdateBuffer"))
3843 return (PFN_vkVoidFunction)CmdUpdateBuffer;
3844 if (!strcmp(name, "CmdFillBuffer"))
3845 return (PFN_vkVoidFunction)CmdFillBuffer;
3846 if (!strcmp(name, "CmdClearColorImage"))
3847 return (PFN_vkVoidFunction)CmdClearColorImage;
3848 if (!strcmp(name, "CmdClearDepthStencilImage"))
3849 return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
3850 if (!strcmp(name, "CmdClearAttachments"))
3851 return (PFN_vkVoidFunction)CmdClearAttachments;
3852 if (!strcmp(name, "CmdResolveImage"))
3853 return (PFN_vkVoidFunction)CmdResolveImage;
3854 if (!strcmp(name, "CmdSetEvent"))
3855 return (PFN_vkVoidFunction)CmdSetEvent;
3856 if (!strcmp(name, "CmdResetEvent"))
3857 return (PFN_vkVoidFunction)CmdResetEvent;
3858 if (!strcmp(name, "CmdWaitEvents"))
3859 return (PFN_vkVoidFunction)CmdWaitEvents;
3860 if (!strcmp(name, "CmdPipelineBarrier"))
3861 return (PFN_vkVoidFunction)CmdPipelineBarrier;
3862 if (!strcmp(name, "CmdBeginQuery"))
3863 return (PFN_vkVoidFunction)CmdBeginQuery;
3864 if (!strcmp(name, "CmdEndQuery"))
3865 return (PFN_vkVoidFunction)CmdEndQuery;
3866 if (!strcmp(name, "CmdResetQueryPool"))
3867 return (PFN_vkVoidFunction)CmdResetQueryPool;
3868 if (!strcmp(name, "CmdWriteTimestamp"))
3869 return (PFN_vkVoidFunction)CmdWriteTimestamp;
3870 if (!strcmp(name, "CmdCopyQueryPoolResults"))
3871 return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
3872 if (!strcmp(name, "CmdPushConstants"))
3873 return (PFN_vkVoidFunction)CmdPushConstants;
3874 if (!strcmp(name, "CmdBeginRenderPass"))
3875 return (PFN_vkVoidFunction)CmdBeginRenderPass;
3876 if (!strcmp(name, "CmdNextSubpass"))
3877 return (PFN_vkVoidFunction)CmdNextSubpass;
3878 if (!strcmp(name, "CmdEndRenderPass"))
3879 return (PFN_vkVoidFunction)CmdEndRenderPass;
3880 if (!strcmp(name, "CmdExecuteCommands"))
3881 return (PFN_vkVoidFunction)CmdExecuteCommands;
3882
3883 return NULL;
3884}
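
// Map a core instance-level command name to this layer's interception routine, or NULL if not intercepted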
3885static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
3886 if (!name || name[0] != 'v' || name[1] != 'k')
3887 return NULL;
3888
3889 name += 2;
3890 if (!strcmp(name, "CreateInstance"))
3891 return (PFN_vkVoidFunction)CreateInstance;
3892 if (!strcmp(name, "DestroyInstance"))
3893 return (PFN_vkVoidFunction)DestroyInstance;
3894 if (!strcmp(name, "EnumeratePhysicalDevices"))
3895 return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
3896 if (!strcmp(name, "GetPhysicalDeviceFeatures"))
3897 return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
3898 if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
3899 return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
3900 if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
3901 return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
3902 if (!strcmp(name, "GetPhysicalDeviceProperties"))
3903 return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
3904 if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
3905 return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
3906 if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
3907 return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
3908 if (!strcmp(name, "GetInstanceProcAddr"))
3909 return (PFN_vkVoidFunction)GetInstanceProcAddr;
3910 if (!strcmp(name, "CreateDevice"))
3911 return (PFN_vkVoidFunction)CreateDevice;
3912 if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
3913 return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
3914 if (!strcmp(name, "EnumerateInstanceLayerProperties"))
3915 return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
3916 if (!strcmp(name, "EnumerateDeviceLayerProperties"))
3917 return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
3918 if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
3919 return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
3920
3921 return NULL;
3922}
3923
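// Return interception routines for WSI swapchain commands only when the corresponding extension was enabled on this device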
3924static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
3925 if (device) {
3926 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3927
3928 if (device_data->wsi_enabled) {
3929 if (!strcmp("vkCreateSwapchainKHR", name))
3930 return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
3931 if (!strcmp("vkDestroySwapchainKHR", name))
3932 return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
3933 if (!strcmp("vkGetSwapchainImagesKHR", name))
3934 return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
3935 if (!strcmp("vkAcquireNextImageKHR", name))
3936 return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
3937 if (!strcmp("vkQueuePresentKHR", name))
3938 return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
3939 }
3940
3941 if (device_data->wsi_display_swapchain_enabled) {
3942 if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
3943 return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
3944 }
3945 }
3946 }
3947
3948 return nullptr;
3949}
3950
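// Return intercepted core and WSI device commands, otherwise forward the query down the dispatch chain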
3951VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
3952 PFN_vkVoidFunction addr;
3953 addr = InterceptCoreDeviceCommand(funcName);
3954 if (addr) {
3955 return addr;
3956 }
3957 assert(device);
3958
3959 addr = InterceptWsiEnabledCommand(funcName, device);
3960 if (addr) {
3961 return addr;
3962 }
3963 if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
3964 return NULL;
3965 }
3966 return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
3967}
3968
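// Check intercepted instance, device, WSI, and debug report commands before forwarding the query down the dispatch chain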
3969VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
3970 PFN_vkVoidFunction addr;
3971 addr = InterceptCoreInstanceCommand(funcName);
3972 if (!addr) {
3973 addr = InterceptCoreDeviceCommand(funcName);
3974 }
3975 if (!addr) {
3976 addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
3977 }
3978 if (addr) {
3979 return addr;
3980 }
3981 assert(instance);
3982
3983 addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
3984 if (addr) {
3985 return addr;
3986 }
3987 addr = InterceptWsiEnabledCommand(funcName, instance);
3988 if (addr) {
3989 return addr;
3990 }
3991 if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
3992 return NULL;
3993 }
3994 return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
3995}
3996
3997} // namespace object_tracker
3998
3999// vk_layer_logging.h expects these to be defined
4000VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
4001 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
4002 const VkAllocationCallbacks *pAllocator,
4003 VkDebugReportCallbackEXT *pMsgCallback) {
4004 return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
4005}
4006
4007VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
4008 const VkAllocationCallbacks *pAllocator) {
4009 object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
4010}
4011
4012VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
4013 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
4014 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
4015 object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
4016}
4017
4018// Loader-layer interface v0, just wrappers since this library exposes only a single layer
4019VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
4020 VkExtensionProperties *pProperties) {
4021 return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
4022}
4023
4024VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
4025 VkLayerProperties *pProperties) {
4026 return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
4027}
4028
4029VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
4030 VkLayerProperties *pProperties) {
4031 // The layer command handles VK_NULL_HANDLE just fine internally
4032 assert(physicalDevice == VK_NULL_HANDLE);
4033 return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
4034}
4035
4036VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
4037 return object_tracker::GetDeviceProcAddr(dev, funcName);
4038}
4039
4040VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
4041 return object_tracker::GetInstanceProcAddr(instance, funcName);
4042}
4043
4044VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
4045 const char *pLayerName, uint32_t *pCount,
4046 VkExtensionProperties *pProperties) {
4047 // The layer command handles VK_NULL_HANDLE just fine internally
4048 assert(physicalDevice == VK_NULL_HANDLE);
4049 return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
4050}