1/*
2 * Copyright (c) 2015-2016 The Khronos Group Inc.
3 * Copyright (c) 2015-2016 Valve Corporation
4 * Copyright (c) 2015-2016 LunarG, Inc.
5 * Copyright (c) 2015-2016 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Tobin Ehlis <tobine@google.com>
21 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
22 * Author: Jon Ashburn <jon@lunarg.com>
23 * Author: Mike Stroyan <stroyan@google.com>
24 * Author: Tony Barbour <tony@LunarG.com>
25 */
26
27#include "vk_loader_platform.h"
28#include "vulkan/vulkan.h"
29
30#include <cinttypes>
31#include <stdio.h>
32#include <stdlib.h>
33#include <string.h>
34
35#include <unordered_map>
36
37#include "vk_layer_config.h"
38#include "vk_layer_data.h"
39#include "vk_layer_logging.h"
40#include "vk_layer_table.h"
41#include "vulkan/vk_layer.h"
42
43#include "object_tracker.h"
44
45namespace object_tracker {
46
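// Set up this layer's debug reporting (log callbacks and actions) from the layer settings file and environment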
47static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
48
49 layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
50}
51
52// Add new queue to head of global queue list
53static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
54 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
55 auto queueItem = device_data->queue_info_map.find(queue);
56 if (queueItem == device_data->queue_info_map.end()) {
57 OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
58 if (p_queue_info != NULL) {
59 memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
60 p_queue_info->queue = queue;
61 p_queue_info->queue_node_index = queue_node_index;
62 device_data->queue_info_map[queue] = p_queue_info;
63 } else {
64 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
65 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
66 "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
67 }
68 }
69}
70
71// Destroy per-queue tracking data and remove all queue objects from the object map
72static void DestroyQueueDataStructures(VkDevice device) {
73 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
74
75 for (auto queue_item : device_data->queue_info_map) {
76 delete queue_item.second;
77 }
78 device_data->queue_info_map.clear();
79
80 // Destroy the items in the queue map
81 auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
82 while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
83 uint32_t obj_index = queue->second->object_type;
84 assert(device_data->num_total_objects > 0);
85 device_data->num_total_objects--;
86 assert(device_data->num_objects[obj_index] > 0);
87 device_data->num_objects[obj_index]--;
88 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
89 __LINE__, OBJTRACK_NONE, LayerName,
90 "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
91 queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
92 delete queue->second;
93 queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
94 }
95}
96
97// Check Queue type flags for selected queue operations
98static void ValidateQueueFlags(VkQueue queue, const char *function) {
99 layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
100 auto queue_item = device_data->queue_info_map.find(queue);
101 if (queue_item != device_data->queue_info_map.end()) {
102 OT_QUEUE_INFO *pQueueInfo = queue_item->second;
103 if (pQueueInfo != NULL) {
104 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
105 if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
106 0) {
107 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
108 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
109 "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
110 }
111 }
112 }
113}
114
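// Track a command buffer allocated from command_pool, noting secondary-level buffers in its status flags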
115static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
116 VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
117 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
118
119 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<const uint64_t>(command_buffer),
120 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
121 string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));
122
123 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
124 pNewObjNode->object_type = object_type;
125 pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
126 pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
127 if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
128 pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
129 } else {
130 pNewObjNode->status = OBJSTATUS_NONE;
131 }
132 device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
133 device_data->num_objects[object_type]++;
134 device_data->num_total_objects++;
135}
136
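// Verify that command_buffer is tracked and was allocated from command_pool; returns true if a validation error was logged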
137static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
138 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
139 bool skip_call = false;
140 uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
141 if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
142 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
143 OBJTRACK_NODE *pNode =
144 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
145
146 if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
147 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
148 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
149 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
150 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
151 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
152 reinterpret_cast<uint64_t &>(command_pool));
153 }
154 } else {
155 skip_call |=
156 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
157 OBJTRACK_NONE, LayerName,
158 "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?", object_handle);
159 }
160 return skip_call;
161}
162
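// Track a descriptor set allocated from descriptor_pool and update object counts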
163static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
164 VkDebugReportObjectTypeEXT object_type) {
165 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
166
167 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
168 reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
169 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
170 reinterpret_cast<uint64_t &>(descriptor_set));
171
172 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
173 pNewObjNode->object_type = object_type;
174 pNewObjNode->status = OBJSTATUS_NONE;
175 pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
176 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
177 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
178 pNewObjNode;
179 device_data->num_objects[object_type]++;
180 device_data->num_total_objects++;
181}
182
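// Verify that descriptor_set is tracked and was allocated from descriptor_pool; returns true if a validation error was logged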
183static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
184 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
185 bool skip_call = false;
186 uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
187 auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
188 if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
189 OBJTRACK_NODE *pNode = dsItem->second;
190
191 if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
192 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
193 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
194 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
195 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
196 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
197 reinterpret_cast<uint64_t &>(descriptor_pool));
198 }
199 } else {
200 skip_call |=
201 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
202 OBJTRACK_NONE, LayerName,
203 "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?", object_handle);
204 }
205 return skip_call;
206}
207
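// Track a queue obtained from the device, reusing the existing entry if this queue handle was already recorded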
208static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
209 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
210
211 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
212 OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
213 object_name[object_type], reinterpret_cast<uint64_t>(vkObj));
214
215 OBJTRACK_NODE *p_obj_node = NULL;
216 auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
217 if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
218 p_obj_node = new OBJTRACK_NODE;
219 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
220 device_data->num_objects[object_type]++;
221 device_data->num_total_objects++;
222 } else {
223 p_obj_node = queue_item->second;
224 }
225 p_obj_node->object_type = object_type;
226 p_obj_node->status = OBJSTATUS_NONE;
227 p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
228}
229
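// Track a swapchain image in the dedicated swapchain image map, recording the owning swapchain as its parent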
230static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
231 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
232 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
233 reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
234 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
235 reinterpret_cast<uint64_t &>(swapchain_image));
236
237 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
238 pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
239 pNewObjNode->status = OBJSTATUS_NONE;
240 pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
241 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
242 device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
243}
244
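// Track a newly created dispatchable object in the object map and update object counts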
245template <typename T1, typename T2>
246static void CreateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
247 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
248
249 log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
250 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
251 object_name[object_type], reinterpret_cast<uint64_t>(object));
252
253 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
254 pNewObjNode->object_type = object_type;
255 pNewObjNode->status = OBJSTATUS_NONE;
256 pNewObjNode->handle = reinterpret_cast<uint64_t>(object);
257 instance_data->object_map[object_type][reinterpret_cast<uint64_t>(object)] = pNewObjNode;
258 instance_data->num_objects[object_type]++;
259 instance_data->num_total_objects++;
260}
261
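// Track a newly created non-dispatchable object in the object map and update object counts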
262template <typename T1, typename T2>
263static void CreateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
264 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
265
266 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t &>(object),
267 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
268 object_name[object_type], reinterpret_cast<uint64_t &>(object));
269
270 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
271 pNewObjNode->object_type = object_type;
272 pNewObjNode->status = OBJSTATUS_NONE;
273 pNewObjNode->handle = reinterpret_cast<uint64_t &>(object);
274 device_data->object_map[object_type][reinterpret_cast<uint64_t &>(object)] = pNewObjNode;
275 device_data->num_objects[object_type]++;
276 device_data->num_total_objects++;
277}
278
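// Remove a dispatchable object from the object map and decrement object counts; reports an error if the handle is not tracked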
279template <typename T1, typename T2>
280static void DestroyDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
281 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
282
283 uint64_t object_handle = reinterpret_cast<uint64_t>(object);
284
285 auto item = instance_data->object_map[object_type].find(object_handle);
286 if (item != instance_data->object_map[object_type].end()) {
287
288 OBJTRACK_NODE *pNode = item->second;
289 assert(instance_data->num_total_objects > 0);
290 instance_data->num_total_objects--;
291 assert(instance_data->num_objects[pNode->object_type] > 0);
292 instance_data->num_objects[pNode->object_type]--;
293
294 log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
295 OBJTRACK_NONE, LayerName,
296 "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
297 object_name[pNode->object_type], reinterpret_cast<uint64_t>(object), instance_data->num_total_objects,
298 instance_data->num_objects[pNode->object_type], object_name[pNode->object_type]);
299
300 delete pNode;
301 instance_data->object_map[object_type].erase(item);
302 } else {
303 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
304 OBJTRACK_UNKNOWN_OBJECT, LayerName,
305 "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?", object_handle);
306 }
307}
308
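// Remove a non-dispatchable object from the object map and decrement object counts; reports an error if the handle is not tracked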
309template <typename T1, typename T2>
310static void DestroyNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type) {
311 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
312
313 uint64_t object_handle = reinterpret_cast<uint64_t &>(object);
314
315 auto item = device_data->object_map[object_type].find(object_handle);
316 if (item != device_data->object_map[object_type].end()) {
317
318 OBJTRACK_NODE *pNode = item->second;
319 assert(device_data->num_total_objects > 0);
320 device_data->num_total_objects--;
321 assert(device_data->num_objects[pNode->object_type] > 0);
322 device_data->num_objects[pNode->object_type]--;
323
324 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
325 OBJTRACK_NONE, LayerName,
326 "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
327 object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
328 device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);
329
330 delete pNode;
331 device_data->object_map[object_type].erase(item);
332 } else {
333 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
334 OBJTRACK_UNKNOWN_OBJECT, LayerName,
335 "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?", object_handle);
336 }
337}
338
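// Verify that a dispatchable object handle is currently tracked; returns true if a validation error was logged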
339template <typename T1, typename T2>
340static bool ValidateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
341 bool null_allowed) {
342 if (null_allowed && (object == VK_NULL_HANDLE)) {
343 return false;
344 }
345 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
346
347 if (instance_data->object_map[object_type].find(reinterpret_cast<uint64_t>(object)) ==
348 instance_data->object_map[object_type].end()) {
349 return log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
350 __LINE__, OBJTRACK_INVALID_OBJECT, LayerName, "Invalid %s Object 0x%" PRIx64, object_name[object_type],
351 reinterpret_cast<uint64_t>(object));
352 }
353 return false;
354}
355
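// Verify that a non-dispatchable object handle is currently tracked, also checking the swapchain image map for images; returns true if a validation error was logged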
356template <typename T1, typename T2>
357static bool ValidateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
358 bool null_allowed) {
359 if (null_allowed && (object == VK_NULL_HANDLE)) {
360 return false;
361 }
362 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
363 if (device_data->object_map[object_type].find(reinterpret_cast<uint64_t &>(object)) ==
364 device_data->object_map[object_type].end()) {
365 // If object is an image, also look for it in the swapchain image map
366 if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
367 (device_data->swapchainImageMap.find(reinterpret_cast<uint64_t &>(object)) == device_data->swapchainImageMap.end())) {
368 return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
369 reinterpret_cast<uint64_t &>(object), __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
370 "Invalid %s Object 0x%" PRIx64, object_name[object_type], reinterpret_cast<uint64_t &>(object));
371 }
372 }
373 return false;
374}
375
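// Report (and remove from tracking) all objects of the given type that were never destroyed for this device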
376static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
377 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
378 for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
379 OBJTRACK_NODE *object_info = item->second;
380 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
381 OBJTRACK_OBJECT_LEAK, LayerName,
382 "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
383 reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
384 item = device_data->object_map[object_type].erase(item);
385 }
386}
387
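// Destroy the instance: report leaked devices and their child objects, call down the chain, then tear down the layer's callbacks and per-instance data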
388VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
389 std::unique_lock<std::mutex> lock(global_lock);
390
391 dispatch_key key = get_dispatch_key(instance);
392 layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
393
394 // Enable the temporary callback(s) here to catch cleanup issues:
395 bool callback_setup = false;
396 if (instance_data->num_tmp_callbacks > 0) {
397 if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
398 instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
399 callback_setup = true;
400 }
401 }
402
403 ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
404
405 DestroyDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);
406 // Report any remaining objects in the object map
407
408 for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
409 iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
410 OBJTRACK_NODE *pNode = iit->second;
411
412 VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
413
414 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
415 OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
416 string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
417 // Report all device-level child objects that still exist for this device:
418 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
419 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
420 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
421 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
422 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
423 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
424 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
425 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
426 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
427 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
428 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
429 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
430 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
431 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
432 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
433 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
434 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
435 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
436 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
437 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
438 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
439 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
440 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
441 }
442 instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();
443
444 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
445 pInstanceTable->DestroyInstance(instance, pAllocator);
446
447 // Disable and cleanup the temporary callback(s):
448 if (callback_setup) {
449 layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
450 }
451 if (instance_data->num_tmp_callbacks > 0) {
452 layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
453 instance_data->num_tmp_callbacks = 0;
454 }
455
456 // Clean up logging callback, if any
457 while (instance_data->logging_callback.size() > 0) {
458 VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
459 layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
460 instance_data->logging_callback.pop_back();
461 }
462
463 layer_debug_report_destroy_instance(instance_data->report_data);
464 layer_data_map.erase(key);
465
466 instanceExtMap.erase(pInstanceTable);
467 lock.unlock();
468 ot_instance_table_map.erase(key);
469}
470
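// Destroy the device: report leaked device-level objects, clean up queue tracking, then call down the chain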
471VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
472
473 std::unique_lock<std::mutex> lock(global_lock);
474 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
475 DestroyDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT);
476
477 // Report any remaining objects associated with this VkDevice object
478 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
479 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
480 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
481 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
482 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
483 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
484 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
485 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
486 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
487 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
488 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
489 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
490 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
491 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
492 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
493 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
494 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
495 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
496 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
497 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
498 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
499 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
500 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
501
502 // Clean up the device's queue tracking data
503 DestroyQueueDataStructures(device);
504
505 lock.unlock();
506
507 dispatch_key key = get_dispatch_key(device);
508 VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
509 pDisp->DestroyDevice(device, pAllocator);
510 ot_device_table_map.erase(key);
511}
512
513VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
514 bool skip_call = false;
515 {
516 std::lock_guard<std::mutex> lock(global_lock);
517 skip_call |=
518 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
519 }
520 if (skip_call) {
521 return;
522 }
523 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
524}
525
526VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
527 VkFormatProperties *pFormatProperties) {
528 bool skip_call = false;
529 {
530 std::lock_guard<std::mutex> lock(global_lock);
531 skip_call |=
532 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
533 }
534 if (skip_call) {
535 return;
536 }
537 get_dispatch_table(ot_instance_table_map, physicalDevice)
538 ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
539}
540
541VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
542 VkImageType type, VkImageTiling tiling,
543 VkImageUsageFlags usage, VkImageCreateFlags flags,
544 VkImageFormatProperties *pImageFormatProperties) {
545 bool skip_call = false;
546 {
547 std::lock_guard<std::mutex> lock(global_lock);
548 skip_call |=
549 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
550 }
551 if (skip_call) {
552 return VK_ERROR_VALIDATION_FAILED_EXT;
553 }
554 VkResult result =
555 get_dispatch_table(ot_instance_table_map, physicalDevice)
556 ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
557 return result;
558}
559
560VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
561 bool skip_call = false;
562 {
563 std::lock_guard<std::mutex> lock(global_lock);
564 skip_call |=
565 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
566 }
567 if (skip_call) {
568 return;
569 }
570 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
571}
572
573VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
574 VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
575 bool skip_call = false;
576 {
577 std::lock_guard<std::mutex> lock(global_lock);
578 skip_call |=
579 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
580 }
581 if (skip_call) {
582 return;
583 }
584 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
585}
586
587VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);
588
589VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);
590
591VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
592 VkExtensionProperties *pProperties);
593
594VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);
595
596VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
597 VkLayerProperties *pProperties);
598
599VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
600 bool skip_call = false;
601 {
602 std::lock_guard<std::mutex> lock(global_lock);
603 skip_call |= ValidateNonDispatchableObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
604 if (pSubmits) {
605 for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
606 if (pSubmits[idx0].pCommandBuffers) {
607 for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
608 skip_call |= ValidateDispatchableObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
609 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
610 }
611 }
612 if (pSubmits[idx0].pSignalSemaphores) {
613 for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
614 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
615 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
616 }
617 }
618 if (pSubmits[idx0].pWaitSemaphores) {
619 for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
620 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
621 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
622 }
623 }
624 }
625 }
626 if (queue) {
627 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
628 }
629 }
630 if (skip_call) {
631 return VK_ERROR_VALIDATION_FAILED_EXT;
632 }
633 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
634 return result;
635}
636
637VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
638 bool skip_call = false;
639 {
640 std::lock_guard<std::mutex> lock(global_lock);
641 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
642 }
643 if (skip_call) {
644 return VK_ERROR_VALIDATION_FAILED_EXT;
645 }
646 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
647 return result;
648}
649
650VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
651 bool skip_call = false;
652 {
653 std::lock_guard<std::mutex> lock(global_lock);
654 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
655 }
656 if (skip_call) {
657 return VK_ERROR_VALIDATION_FAILED_EXT;
658 }
659 VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
660 return result;
661}
662
663VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
664 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
665 bool skip_call = false;
666 {
667 std::lock_guard<std::mutex> lock(global_lock);
668 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
669 }
670 if (skip_call) {
671 return VK_ERROR_VALIDATION_FAILED_EXT;
672 }
673 VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
674 {
675 std::lock_guard<std::mutex> lock(global_lock);
676 if (result == VK_SUCCESS) {
677 CreateNonDispatchableObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
678 }
679 }
680 return result;
681}
682
683VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
684 const VkMappedMemoryRange *pMemoryRanges) {
685 bool skip_call = false;
686 {
687 std::lock_guard<std::mutex> lock(global_lock);
688 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
689 if (pMemoryRanges) {
690 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
691 if (pMemoryRanges[idx0].memory) {
692 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
693 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
694 }
695 }
696 }
697 }
698 if (skip_call) {
699 return VK_ERROR_VALIDATION_FAILED_EXT;
700 }
701 VkResult result =
702 get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
703 return result;
704}
705
706VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
707 const VkMappedMemoryRange *pMemoryRanges) {
708 bool skip_call = false;
709 {
710 std::lock_guard<std::mutex> lock(global_lock);
711 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
712 if (pMemoryRanges) {
713 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
714 if (pMemoryRanges[idx0].memory) {
715 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
716 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
717 }
718 }
719 }
720 }
721 if (skip_call) {
722 return VK_ERROR_VALIDATION_FAILED_EXT;
723 }
724 VkResult result =
725 get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
726 return result;
727}
728
729VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
730 VkDeviceSize *pCommittedMemoryInBytes) {
731 bool skip_call = false;
732 {
733 std::lock_guard<std::mutex> lock(global_lock);
734 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
735 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
736 }
737 if (skip_call) {
738 return;
739 }
740 get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
741}
742
743VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
744 VkDeviceSize memoryOffset) {
745 bool skip_call = false;
746 {
747 std::lock_guard<std::mutex> lock(global_lock);
748 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
749 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
750 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
751 }
752 if (skip_call) {
753 return VK_ERROR_VALIDATION_FAILED_EXT;
754 }
755 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
756 return result;
757}
758
759VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
760 bool skip_call = false;
761 {
762 std::lock_guard<std::mutex> lock(global_lock);
763 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
764 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
765 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
766 }
767 if (skip_call) {
768 return VK_ERROR_VALIDATION_FAILED_EXT;
769 }
770 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
771 return result;
772}
773
774VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
775 VkMemoryRequirements *pMemoryRequirements) {
776 bool skip_call = false;
777 {
778 std::lock_guard<std::mutex> lock(global_lock);
779 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
780 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
781 }
782 if (skip_call) {
783 return;
784 }
785 get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
786}
787
788VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
789 bool skip_call = false;
790 {
791 std::lock_guard<std::mutex> lock(global_lock);
792 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
793 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
794 }
795 if (skip_call) {
796 return;
797 }
798 get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
799}
800
801VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
802 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
803 bool skip_call = false;
804 {
805 std::lock_guard<std::mutex> lock(global_lock);
806 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
807 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
808 }
809 if (skip_call) {
810 return;
811 }
812 get_dispatch_table(ot_device_table_map, device)
813 ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
814}
815
816VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
817 VkImageType type, VkSampleCountFlagBits samples,
818 VkImageUsageFlags usage, VkImageTiling tiling,
819 uint32_t *pPropertyCount,
820 VkSparseImageFormatProperties *pProperties) {
821 bool skip_call = false;
822 {
823 std::lock_guard<std::mutex> lock(global_lock);
824 skip_call |=
825 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
826 }
827 if (skip_call) {
828 return;
829 }
830 get_dispatch_table(ot_instance_table_map, physicalDevice)
831 ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
832 pProperties);
833}
834
835VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
836 const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
837 bool skip_call = false;
838 {
839 std::lock_guard<std::mutex> lock(global_lock);
840 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
841 }
842 if (skip_call) {
843 return VK_ERROR_VALIDATION_FAILED_EXT;
844 }
845 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
846 {
847 std::lock_guard<std::mutex> lock(global_lock);
848 if (result == VK_SUCCESS) {
849 CreateNonDispatchableObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
850 }
851 }
852 return result;
853}
854
855VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
856 bool skip_call = false;
857 {
858 std::lock_guard<std::mutex> lock(global_lock);
859 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
860 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
861 }
862 if (skip_call) {
863 return;
864 }
865 {
866 std::lock_guard<std::mutex> lock(global_lock);
867 DestroyNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
868 }
869 get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
870}
871
872VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
873 bool skip_call = false;
874 {
875 std::lock_guard<std::mutex> lock(global_lock);
876 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
877 if (pFences) {
878 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
879 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
880 }
881 }
882 }
883 if (skip_call) {
884 return VK_ERROR_VALIDATION_FAILED_EXT;
885 }
886 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
887 return result;
888}
889
890VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
891 bool skip_call = false;
892 {
893 std::lock_guard<std::mutex> lock(global_lock);
894 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
895 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
896 }
897 if (skip_call) {
898 return VK_ERROR_VALIDATION_FAILED_EXT;
899 }
900 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
901 return result;
902}
903
904VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
905 uint64_t timeout) {
906 bool skip_call = false;
907 {
908 std::lock_guard<std::mutex> lock(global_lock);
909 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
910 if (pFences) {
911 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
912 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
913 }
914 }
915 }
916 if (skip_call) {
917 return VK_ERROR_VALIDATION_FAILED_EXT;
918 }
919 VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
920 return result;
921}
922
923VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
924 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
925 bool skip_call = false;
926 {
927 std::lock_guard<std::mutex> lock(global_lock);
928 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
929 }
930 if (skip_call) {
931 return VK_ERROR_VALIDATION_FAILED_EXT;
932 }
933 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
934 {
935 std::lock_guard<std::mutex> lock(global_lock);
936 if (result == VK_SUCCESS) {
937 CreateNonDispatchableObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
938 }
939 }
940 return result;
941}
942
943VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
944 bool skip_call = false;
945 {
946 std::lock_guard<std::mutex> lock(global_lock);
947 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
948 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
949 }
950 if (skip_call) {
951 return;
952 }
953 {
954 std::lock_guard<std::mutex> lock(global_lock);
955 DestroyNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
956 }
957 get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
958}
959
960VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
961 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
962 bool skip_call = false;
963 {
964 std::lock_guard<std::mutex> lock(global_lock);
965 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
966 }
967 if (skip_call) {
968 return VK_ERROR_VALIDATION_FAILED_EXT;
969 }
970 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
971 {
972 std::lock_guard<std::mutex> lock(global_lock);
973 if (result == VK_SUCCESS) {
974 CreateNonDispatchableObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
975 }
976 }
977 return result;
978}
979
980VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
981 bool skip_call = false;
982 {
983 std::lock_guard<std::mutex> lock(global_lock);
984 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
985 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
986 }
987 if (skip_call) {
988 return;
989 }
990 {
991 std::lock_guard<std::mutex> lock(global_lock);
992 DestroyNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
993 }
994 get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
995}
996
997VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
998 bool skip_call = false;
999 {
1000 std::lock_guard<std::mutex> lock(global_lock);
1001 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1002 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1003 }
1004 if (skip_call) {
1005 return VK_ERROR_VALIDATION_FAILED_EXT;
1006 }
1007 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
1008 return result;
1009}
1010
1011VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
1012 bool skip_call = false;
1013 {
1014 std::lock_guard<std::mutex> lock(global_lock);
1015 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1016 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1017 }
1018 if (skip_call) {
1019 return VK_ERROR_VALIDATION_FAILED_EXT;
1020 }
1021 VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
1022 return result;
1023}
1024
1025VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
1026 bool skip_call = false;
1027 {
1028 std::lock_guard<std::mutex> lock(global_lock);
1029 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1030 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1031 }
1032 if (skip_call) {
1033 return VK_ERROR_VALIDATION_FAILED_EXT;
1034 }
1035 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
1036 return result;
1037}
1038
1039VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1040 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
1041 bool skip_call = false;
1042 {
1043 std::lock_guard<std::mutex> lock(global_lock);
1044 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1045 }
1046 if (skip_call) {
1047 return VK_ERROR_VALIDATION_FAILED_EXT;
1048 }
1049 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
1050 {
1051 std::lock_guard<std::mutex> lock(global_lock);
1052 if (result == VK_SUCCESS) {
1053 CreateNonDispatchableObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
1054 }
1055 }
1056 return result;
1057}
1058
1059VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
1060 bool skip_call = false;
1061 {
1062 std::lock_guard<std::mutex> lock(global_lock);
1063 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1064 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1065 }
1066 if (skip_call) {
1067 return;
1068 }
1069 {
1070 std::lock_guard<std::mutex> lock(global_lock);
1071 DestroyNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
1072 }
1073 get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
1074}
1075
1076VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
1077 size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1078 bool skip_call = false;
1079 {
1080 std::lock_guard<std::mutex> lock(global_lock);
1081 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1082 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1083 }
1084 if (skip_call) {
1085 return VK_ERROR_VALIDATION_FAILED_EXT;
1086 }
1087 VkResult result = get_dispatch_table(ot_device_table_map, device)
1088 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
1089 return result;
1090}
1091
1092VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
1093 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
1094 bool skip_call = false;
1095 {
1096 std::lock_guard<std::mutex> lock(global_lock);
1097 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1098 }
1099 if (skip_call) {
1100 return VK_ERROR_VALIDATION_FAILED_EXT;
1101 }
1102 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1103 {
1104 std::lock_guard<std::mutex> lock(global_lock);
1105 if (result == VK_SUCCESS) {
1106 CreateNonDispatchableObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
1107 }
1108 }
1109 return result;
1110}
1111
1112VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
1113 bool skip_call = false;
1114 {
1115 std::lock_guard<std::mutex> lock(global_lock);
1116 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1117 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1118 }
1119 if (skip_call) {
1120 return;
1121 }
1122 {
1123 std::lock_guard<std::mutex> lock(global_lock);
1124 DestroyNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
1125 }
1126 get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
1127}
1128
1129VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
1130 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
1131 bool skip_call = false;
1132 {
1133 std::lock_guard<std::mutex> lock(global_lock);
1134 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1135 if (pCreateInfo) {
1136 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1137 }
1138 }
1139 if (skip_call) {
1140 return VK_ERROR_VALIDATION_FAILED_EXT;
1141 }
1142 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
1143 {
1144 std::lock_guard<std::mutex> lock(global_lock);
1145 if (result == VK_SUCCESS) {
1146 CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
1147 }
1148 }
1149 return result;
1150}
1151
1152VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
1153 bool skip_call = false;
1154 {
1155 std::lock_guard<std::mutex> lock(global_lock);
1156 skip_call |= ValidateNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
1157 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1158 }
1159 if (skip_call) {
1160 return;
1161 }
1162 {
1163 std::lock_guard<std::mutex> lock(global_lock);
1164 DestroyNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
1165 }
1166 get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
1167}
1168
1169VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1170 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1171 bool skip_call = false;
1172 {
1173 std::lock_guard<std::mutex> lock(global_lock);
1174 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1175 }
1176 if (skip_call) {
1177 return VK_ERROR_VALIDATION_FAILED_EXT;
1178 }
1179 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
1180 {
1181 std::lock_guard<std::mutex> lock(global_lock);
1182 if (result == VK_SUCCESS) {
1183 CreateNonDispatchableObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
1184 }
1185 }
1186 return result;
1187}
1188
1189VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1190 bool skip_call = false;
1191 {
1192 std::lock_guard<std::mutex> lock(global_lock);
1193 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1194 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1195 }
1196 if (skip_call) {
1197 return;
1198 }
1199 {
1200 std::lock_guard<std::mutex> lock(global_lock);
1201 DestroyNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
1202 }
1203 get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
1204}
1205
1206VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
1207 VkSubresourceLayout *pLayout) {
1208 bool skip_call = false;
1209 {
1210 std::lock_guard<std::mutex> lock(global_lock);
1211 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1212 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1213 }
1214 if (skip_call) {
1215 return;
1216 }
1217 get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1218}
1219
1220VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1221 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1222 bool skip_call = false;
1223 {
1224 std::lock_guard<std::mutex> lock(global_lock);
1225 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1226 if (pCreateInfo) {
1227 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1228 }
1229 }
1230 if (skip_call) {
1231 return VK_ERROR_VALIDATION_FAILED_EXT;
1232 }
1233 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
1234 {
1235 std::lock_guard<std::mutex> lock(global_lock);
1236 if (result == VK_SUCCESS) {
1237 CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
1238 }
1239 }
1240 return result;
1241}
1242
1243VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
1244 bool skip_call = false;
1245 {
1246 std::lock_guard<std::mutex> lock(global_lock);
1247 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1248 skip_call |= ValidateNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1249 }
1250 if (skip_call) {
1251 return;
1252 }
1253 {
1254 std::lock_guard<std::mutex> lock(global_lock);
1255 DestroyNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
1256 }
1257 get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
1258}
1259
1260VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1261 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1262 bool skip_call = false;
1263 {
1264 std::lock_guard<std::mutex> lock(global_lock);
1265 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1266 }
1267 if (skip_call) {
1268 return VK_ERROR_VALIDATION_FAILED_EXT;
1269 }
1270 VkResult result =
1271 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1272 {
1273 std::lock_guard<std::mutex> lock(global_lock);
1274 if (result == VK_SUCCESS) {
1275 CreateNonDispatchableObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
1276 }
1277 }
1278 return result;
1279}
1280
1281VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1282 const VkAllocationCallbacks *pAllocator) {
1283 bool skip_call = false;
1284 {
1285 std::lock_guard<std::mutex> lock(global_lock);
1286 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1287 skip_call |= ValidateNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
1288 }
1289 if (skip_call) {
1290 return;
1291 }
1292 {
1293 std::lock_guard<std::mutex> lock(global_lock);
1294 DestroyNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
1295 }
1296 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1297}
1298
1299VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1300 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1301 bool skip_call = false;
1302 {
1303 std::lock_guard<std::mutex> lock(global_lock);
1304 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1305 }
1306 if (skip_call) {
1307 return VK_ERROR_VALIDATION_FAILED_EXT;
1308 }
1309 VkResult result =
1310 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1311 {
1312 std::lock_guard<std::mutex> lock(global_lock);
1313 if (result == VK_SUCCESS) {
1314 CreateNonDispatchableObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
1315 }
1316 }
1317 return result;
1318}
1319
1320VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1321 const VkAllocationCallbacks *pAllocator) {
1322 bool skip_call = false;
1323 {
1324 std::lock_guard<std::mutex> lock(global_lock);
1325 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1326 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1327 }
1328 if (skip_call) {
1329 return;
1330 }
1331 {
1332 std::lock_guard<std::mutex> lock(global_lock);
1333 DestroyNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
1334 }
1335 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1336}
1337
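// Query- and reset-style entry points such as this one only validate the handles involved and forward the call;
// they do not add or remove anything from the object maps.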
1338VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1339 void *pData) {
1340 bool skip_call = false;
1341 {
1342 std::lock_guard<std::mutex> lock(global_lock);
1343 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1344 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1345 }
1346 if (skip_call) {
1347 return VK_ERROR_VALIDATION_FAILED_EXT;
1348 }
1349 VkResult result =
1350 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1351 return result;
1352}
1353
1354VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1355 const VkPipelineCache *pSrcCaches) {
1356 bool skip_call = false;
1357 {
1358 std::lock_guard<std::mutex> lock(global_lock);
1359 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1360 skip_call |= ValidateNonDispatchableObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1361 if (pSrcCaches) {
1362 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1363 skip_call |=
1364 ValidateNonDispatchableObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1365 }
1366 }
1367 }
1368 if (skip_call) {
1369 return VK_ERROR_VALIDATION_FAILED_EXT;
1370 }
1371 VkResult result =
1372 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1373 return result;
1374}
1375
1376VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1377 bool skip_call = false;
1378 {
1379 std::lock_guard<std::mutex> lock(global_lock);
1380 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1381 skip_call |= ValidateNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1382 }
1383 if (skip_call) {
1384 return;
1385 }
1386 {
1387 std::lock_guard<std::mutex> lock(global_lock);
1388 DestroyNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
1389 }
1390 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1391}
1392
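// Pipeline layout creation also validates every descriptor set layout referenced through pSetLayouts.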
1393VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1394 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1395 bool skip_call = false;
1396 {
1397 std::lock_guard<std::mutex> lock(global_lock);
1398 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1399 if (pCreateInfo) {
1400 if (pCreateInfo->pSetLayouts) {
1401 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
1402 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pSetLayouts[idx0],
1403 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1404 }
1405 }
1406 }
1407 }
1408 if (skip_call) {
1409 return VK_ERROR_VALIDATION_FAILED_EXT;
1410 }
1411 VkResult result =
1412 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1413 {
1414 std::lock_guard<std::mutex> lock(global_lock);
1415 if (result == VK_SUCCESS) {
1416 CreateNonDispatchableObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
1417 }
1418 }
1419 return result;
1420}
1421
1422VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1423 const VkAllocationCallbacks *pAllocator) {
1424 bool skip_call = false;
1425 {
1426 std::lock_guard<std::mutex> lock(global_lock);
1427 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1428 skip_call |= ValidateNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1429 }
1430 if (skip_call) {
1431 return;
1432 }
1433 {
1434 std::lock_guard<std::mutex> lock(global_lock);
1435 DestroyNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
1436 }
1437 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1438}
1439
1440VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1441 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1442 bool skip_call = false;
1443 {
1444 std::lock_guard<std::mutex> lock(global_lock);
1445 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1446 }
1447 if (skip_call) {
1448 return VK_ERROR_VALIDATION_FAILED_EXT;
1449 }
1450 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1451 {
1452 std::lock_guard<std::mutex> lock(global_lock);
1453 if (result == VK_SUCCESS) {
1454 CreateNonDispatchableObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
1455 }
1456 }
1457 return result;
1458}
1459
1460VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1461 bool skip_call = false;
1462 {
1463 std::lock_guard<std::mutex> lock(global_lock);
1464 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1465 skip_call |= ValidateNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1466 }
1467 if (skip_call) {
1468 return;
1469 }
1470 {
1471 std::lock_guard<std::mutex> lock(global_lock);
1472 DestroyNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
1473 }
1474 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1475}
1476
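// Descriptor set layout creation walks each binding and, when pImmutableSamplers is provided, validates every sampler
// handle it contains.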
1477VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1478 const VkAllocationCallbacks *pAllocator,
1479 VkDescriptorSetLayout *pSetLayout) {
1480 bool skip_call = false;
1481 {
1482 std::lock_guard<std::mutex> lock(global_lock);
1483 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1484 if (pCreateInfo) {
1485 if (pCreateInfo->pBindings) {
1486 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1487 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1488 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1489 skip_call |=
1490 ValidateNonDispatchableObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
1491 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1492 }
1493 }
1494 }
1495 }
1496 }
1497 }
1498 if (skip_call) {
1499 return VK_ERROR_VALIDATION_FAILED_EXT;
1500 }
1501 VkResult result =
1502 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1503 {
1504 std::lock_guard<std::mutex> lock(global_lock);
1505 if (result == VK_SUCCESS) {
1506 CreateNonDispatchableObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
1507 }
1508 }
1509 return result;
1510}
1511
1512VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1513 const VkAllocationCallbacks *pAllocator) {
1514 bool skip_call = false;
1515 {
1516 std::lock_guard<std::mutex> lock(global_lock);
1517 skip_call |= ValidateNonDispatchableObject(device, descriptorSetLayout,
1518 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1519 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1520 }
1521 if (skip_call) {
1522 return;
1523 }
1524 {
1525 std::lock_guard<std::mutex> lock(global_lock);
1526 DestroyNonDispatchableObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
1527 }
1528 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1529}
1530
1531VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1532 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1533 bool skip_call = false;
1534 {
1535 std::lock_guard<std::mutex> lock(global_lock);
1536 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1537 }
1538 if (skip_call) {
1539 return VK_ERROR_VALIDATION_FAILED_EXT;
1540 }
1541 VkResult result =
1542 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1543 {
1544 std::lock_guard<std::mutex> lock(global_lock);
1545 if (result == VK_SUCCESS) {
1546 CreateNonDispatchableObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
1547 }
1548 }
1549 return result;
1550}
1551
1552VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1553 VkDescriptorPoolResetFlags flags) {
1554 bool skip_call = false;
1555 {
1556 std::lock_guard<std::mutex> lock(global_lock);
1557 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1558 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1559 }
1560 if (skip_call) {
1561 return VK_ERROR_VALIDATION_FAILED_EXT;
1562 }
1563 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1564 return result;
1565}
1566
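// Descriptor updates are validated per descriptor type: buffer-type writes check the buffers in pBufferInfo, image- and
// sampler-type writes check the image views and samplers in pImageInfo, and texel-buffer writes check each element of
// pTexelBufferView. Copy entries validate their source and destination descriptor sets.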
1567VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1568 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
1569 const VkCopyDescriptorSet *pDescriptorCopies) {
1570 bool skip_call = false;
1571 {
1572 std::lock_guard<std::mutex> lock(global_lock);
1573 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1574 if (pDescriptorCopies) {
1575 for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
1576 if (pDescriptorCopies[idx0].dstSet) {
1577 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].dstSet,
1578 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1579 }
1580 if (pDescriptorCopies[idx0].srcSet) {
1581 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].srcSet,
1582 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1583 }
1584 }
1585 }
1586 if (pDescriptorWrites) {
1587 for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
1588 if (pDescriptorWrites[idx1].dstSet) {
1589 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].dstSet,
1590 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1591 }
1592 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1593 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
1594 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
1595 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
1596 for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
1597 if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
1598 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
1599 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1600 }
1601 }
1602 }
1603 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
1604 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
1605 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
1606 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
1607 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
1608 for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
1609 if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
1610 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
1611 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1612 }
1613 if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
1614 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
1615 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1616 }
1617 }
1618 }
1619 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1620 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
1621 for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
1622 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
1623 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
1624 }
1625 }
1626 }
1627 }
1628 }
1629 if (skip_call) {
1630 return;
1631 }
1632 get_dispatch_table(ot_device_table_map, device)
1633 ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
1634}
1635
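// Framebuffer creation validates the render pass and every attachment image view named in the create info.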
1636VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1637 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1638 bool skip_call = false;
1639 {
1640 std::lock_guard<std::mutex> lock(global_lock);
1641 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1642 if (pCreateInfo) {
1643 if (pCreateInfo->pAttachments) {
1644 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
1645 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pAttachments[idx0],
1646 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1647 }
1648 }
1649 if (pCreateInfo->renderPass) {
1650 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->renderPass,
1651 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1652 }
1653 }
1654 }
1655 if (skip_call) {
1656 return VK_ERROR_VALIDATION_FAILED_EXT;
1657 }
1658 VkResult result =
1659 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1660 {
1661 std::lock_guard<std::mutex> lock(global_lock);
1662 if (result == VK_SUCCESS) {
1663 CreateNonDispatchableObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
1664 }
1665 }
1666 return result;
1667}
1668
1669VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1670 bool skip_call = false;
1671 {
1672 std::lock_guard<std::mutex> lock(global_lock);
1673 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1674 skip_call |= ValidateNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
1675 }
1676 if (skip_call) {
1677 return;
1678 }
1679 {
1680 std::lock_guard<std::mutex> lock(global_lock);
1681 DestroyNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
1682 }
1683 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1684}
1685
1686VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1687 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1688 bool skip_call = false;
1689 {
1690 std::lock_guard<std::mutex> lock(global_lock);
1691 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1692 }
1693 if (skip_call) {
1694 return VK_ERROR_VALIDATION_FAILED_EXT;
1695 }
1696 VkResult result =
1697 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1698 {
1699 std::lock_guard<std::mutex> lock(global_lock);
1700 if (result == VK_SUCCESS) {
1701 CreateNonDispatchableObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
1702 }
1703 }
1704 return result;
1705}
1706
1707VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1708 bool skip_call = false;
1709 {
1710 std::lock_guard<std::mutex> lock(global_lock);
1711 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1712 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1713 }
1714 if (skip_call) {
1715 return;
1716 }
1717 {
1718 std::lock_guard<std::mutex> lock(global_lock);
1719 DestroyNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
1720 }
1721 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1722}
1723
1724VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1725 bool skip_call = false;
1726 {
1727 std::lock_guard<std::mutex> lock(global_lock);
1728 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1729 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1730 }
1731 if (skip_call) {
1732 return;
1733 }
1734 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1735}
1736
1737VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1738 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1739 bool skip_call = false;
1740 {
1741 std::lock_guard<std::mutex> lock(global_lock);
1742 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1743 }
1744 if (skip_call) {
1745 return VK_ERROR_VALIDATION_FAILED_EXT;
1746 }
1747 VkResult result =
1748 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1749 {
1750 std::lock_guard<std::mutex> lock(global_lock);
1751 if (result == VK_SUCCESS) {
1752 CreateNonDispatchableObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
1753 }
1754 }
1755 return result;
1756}
1757
1758VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1759 bool skip_call = false;
1760 {
1761 std::lock_guard<std::mutex> lock(global_lock);
1762 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1763 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1764 }
1765 if (skip_call) {
1766 return VK_ERROR_VALIDATION_FAILED_EXT;
1767 }
1768 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1769 return result;
1770}
1771
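// BeginCommandBuffer looks up the tracked node for the command buffer; for secondary command buffers it additionally
// validates the framebuffer and render pass supplied in pInheritanceInfo.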
1772VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1773 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1774 bool skip_call = false;
1775 {
1776 std::lock_guard<std::mutex> lock(global_lock);
1777 skip_call |=
1778 ValidateDispatchableObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1779 if (begin_info) {
1780            OBJTRACK_NODE *pNode =
1781                device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
1782            if ((pNode != nullptr) && (begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
1783 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
1784 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
1785 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
1786 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1787 }
1788 }
1789 }
1790 if (skip_call) {
1791 return VK_ERROR_VALIDATION_FAILED_EXT;
1792 }
1793 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1794 return result;
1795}
1796
1797VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1798 bool skip_call = false;
1799 {
1800 std::lock_guard<std::mutex> lock(global_lock);
1801 skip_call |=
1802 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1803 }
1804 if (skip_call) {
1805 return VK_ERROR_VALIDATION_FAILED_EXT;
1806 }
1807 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1808 return result;
1809}
1810
1811VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1812 bool skip_call = false;
1813 {
1814 std::lock_guard<std::mutex> lock(global_lock);
1815 skip_call |=
1816 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1817 }
1818 if (skip_call) {
1819 return VK_ERROR_VALIDATION_FAILED_EXT;
1820 }
1821 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1822 return result;
1823}
1824
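// The vkCmd* intercepts that follow all use the same minimal skeleton (a sketch, with the names used in this file):
//
//     { std::lock_guard<std::mutex> lock(global_lock);
//       skip_call |= ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
//       /* ...validate any other handles the command references... */ }
//     if (skip_call) return;
//     get_dispatch_table(ot_device_table_map, commandBuffer)->CmdXxx(...);
//
// Recording a command never changes the tracker's state; only create/destroy/allocate/free calls do that.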
1825VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1826 VkPipeline pipeline) {
1827 bool skip_call = false;
1828 {
1829 std::lock_guard<std::mutex> lock(global_lock);
1830 skip_call |=
1831 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1832 skip_call |= ValidateNonDispatchableObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1833 }
1834 if (skip_call) {
1835 return;
1836 }
1837 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1838}
1839
1840VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1841 const VkViewport *pViewports) {
1842 bool skip_call = false;
1843 {
1844 std::lock_guard<std::mutex> lock(global_lock);
1845 skip_call |=
1846 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1847 }
1848 if (skip_call) {
1849 return;
1850 }
1851 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1852}
1853
1854VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1855 const VkRect2D *pScissors) {
1856 bool skip_call = false;
1857 {
1858 std::lock_guard<std::mutex> lock(global_lock);
1859 skip_call |=
1860 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1861 }
1862 if (skip_call) {
1863 return;
1864 }
1865 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1866}
1867
1868VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1869 bool skip_call = false;
1870 {
1871 std::lock_guard<std::mutex> lock(global_lock);
1872 skip_call |=
1873 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1874 }
1875 if (skip_call) {
1876 return;
1877 }
1878 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1879}
1880
1881VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1882 float depthBiasSlopeFactor) {
1883 bool skip_call = false;
1884 {
1885 std::lock_guard<std::mutex> lock(global_lock);
1886 skip_call |=
1887 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1888 }
1889 if (skip_call) {
1890 return;
1891 }
1892 get_dispatch_table(ot_device_table_map, commandBuffer)
1893 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1894}
1895
1896VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1897 bool skip_call = false;
1898 {
1899 std::lock_guard<std::mutex> lock(global_lock);
1900 skip_call |=
1901 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1902 }
1903 if (skip_call) {
1904 return;
1905 }
1906 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1907}
1908
1909VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1910 bool skip_call = false;
1911 {
1912 std::lock_guard<std::mutex> lock(global_lock);
1913 skip_call |=
1914 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1915 }
1916 if (skip_call) {
1917 return;
1918 }
1919 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1920}
1921
1922VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1923 uint32_t compareMask) {
1924 bool skip_call = false;
1925 {
1926 std::lock_guard<std::mutex> lock(global_lock);
1927 skip_call |=
1928 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1929 }
1930 if (skip_call) {
1931 return;
1932 }
1933 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1934}
1935
1936VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1937 bool skip_call = false;
1938 {
1939 std::lock_guard<std::mutex> lock(global_lock);
1940 skip_call |=
1941 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1942 }
1943 if (skip_call) {
1944 return;
1945 }
1946 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1947}
1948
1949VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1950 bool skip_call = false;
1951 {
1952 std::lock_guard<std::mutex> lock(global_lock);
1953 skip_call |=
1954 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1955 }
1956 if (skip_call) {
1957 return;
1958 }
1959 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1960}
1961
1962VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1963 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1964 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1965 const uint32_t *pDynamicOffsets) {
1966 bool skip_call = false;
1967 {
1968 std::lock_guard<std::mutex> lock(global_lock);
1969 skip_call |=
1970 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1971 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1972 if (pDescriptorSets) {
1973 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
1974 skip_call |= ValidateNonDispatchableObject(commandBuffer, pDescriptorSets[idx0],
1975 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1976 }
1977 }
1978 }
1979 if (skip_call) {
1980 return;
1981 }
1982 get_dispatch_table(ot_device_table_map, commandBuffer)
1983 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
1984 dynamicOffsetCount, pDynamicOffsets);
1985}
1986
1987VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1988 VkIndexType indexType) {
1989 bool skip_call = false;
1990 {
1991 std::lock_guard<std::mutex> lock(global_lock);
1992 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1993 skip_call |=
1994 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1995 }
1996 if (skip_call) {
1997 return;
1998 }
1999 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
2000}
2001
2002VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
2003 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
2004 bool skip_call = false;
2005 {
2006 std::lock_guard<std::mutex> lock(global_lock);
2007 skip_call |=
2008 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2009 if (pBuffers) {
2010 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
2011 skip_call |=
2012 ValidateNonDispatchableObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2013 }
2014 }
2015 }
2016 if (skip_call) {
2017 return;
2018 }
2019 get_dispatch_table(ot_device_table_map, commandBuffer)
2020 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
2021}
2022
2023VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
2024 uint32_t firstVertex, uint32_t firstInstance) {
2025 bool skip_call = false;
2026 {
2027 std::lock_guard<std::mutex> lock(global_lock);
2028 skip_call |=
2029 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2030 }
2031 if (skip_call) {
2032 return;
2033 }
2034 get_dispatch_table(ot_device_table_map, commandBuffer)
2035 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2036}
2037
2038VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2039 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2040 bool skip_call = false;
2041 {
2042 std::lock_guard<std::mutex> lock(global_lock);
2043 skip_call |=
2044 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2045 }
2046 if (skip_call) {
2047 return;
2048 }
2049 get_dispatch_table(ot_device_table_map, commandBuffer)
2050 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2051}
2052
2053VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2054 uint32_t stride) {
2055 bool skip_call = false;
2056 {
2057 std::lock_guard<std::mutex> lock(global_lock);
2058 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2059 skip_call |=
2060 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2061 }
2062 if (skip_call) {
2063 return;
2064 }
2065 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2066}
2067
2068VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2069 uint32_t drawCount, uint32_t stride) {
2070 bool skip_call = false;
2071 {
2072 std::lock_guard<std::mutex> lock(global_lock);
2073 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2074 skip_call |=
2075 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2076 }
2077 if (skip_call) {
2078 return;
2079 }
2080 get_dispatch_table(ot_device_table_map, commandBuffer)
2081 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2082}
2083
2084VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2085 bool skip_call = false;
2086 {
2087 std::lock_guard<std::mutex> lock(global_lock);
2088 skip_call |=
2089 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2090 }
2091 if (skip_call) {
2092 return;
2093 }
2094 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2095}
2096
2097VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2098 bool skip_call = false;
2099 {
2100 std::lock_guard<std::mutex> lock(global_lock);
2101 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2102 skip_call |=
2103 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2104 }
2105 if (skip_call) {
2106 return;
2107 }
2108 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2109}
2110
2111VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2112 uint32_t regionCount, const VkBufferCopy *pRegions) {
2113 bool skip_call = false;
2114 {
2115 std::lock_guard<std::mutex> lock(global_lock);
2116 skip_call |=
2117 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2118 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2119 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2120 }
2121 if (skip_call) {
2122 return;
2123 }
2124 get_dispatch_table(ot_device_table_map, commandBuffer)
2125 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2126}
2127
2128VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2129 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2130 const VkImageCopy *pRegions) {
2131 bool skip_call = false;
2132 {
2133 std::lock_guard<std::mutex> lock(global_lock);
2134 skip_call |=
2135 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2136 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2137 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2138 }
2139 if (skip_call) {
2140 return;
2141 }
2142 get_dispatch_table(ot_device_table_map, commandBuffer)
2143 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2144}
2145
2146VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2147 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2148 const VkImageBlit *pRegions, VkFilter filter) {
2149 bool skip_call = false;
2150 {
2151 std::lock_guard<std::mutex> lock(global_lock);
2152 skip_call |=
2153 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2154 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2155 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2156 }
2157 if (skip_call) {
2158 return;
2159 }
2160 get_dispatch_table(ot_device_table_map, commandBuffer)
2161 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2162}
2163
2164VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2165 VkImageLayout dstImageLayout, uint32_t regionCount,
2166 const VkBufferImageCopy *pRegions) {
2167 bool skip_call = false;
2168 {
2169 std::lock_guard<std::mutex> lock(global_lock);
2170 skip_call |=
2171 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2172 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2173 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2174 }
2175 if (skip_call) {
2176 return;
2177 }
2178 get_dispatch_table(ot_device_table_map, commandBuffer)
2179 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2180}
2181
2182VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2183 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2184 bool skip_call = false;
2185 {
2186 std::lock_guard<std::mutex> lock(global_lock);
2187 skip_call |=
2188 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2189 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2190 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2191 }
2192 if (skip_call) {
2193 return;
2194 }
2195 get_dispatch_table(ot_device_table_map, commandBuffer)
2196 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2197}
2198
2199VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2200 VkDeviceSize dataSize, const uint32_t *pData) {
2201 bool skip_call = false;
2202 {
2203 std::lock_guard<std::mutex> lock(global_lock);
2204 skip_call |=
2205 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2206 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2207 }
2208 if (skip_call) {
2209 return;
2210 }
2211 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2212}
2213
2214VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2215 VkDeviceSize size, uint32_t data) {
2216 bool skip_call = false;
2217 {
2218 std::lock_guard<std::mutex> lock(global_lock);
2219 skip_call |=
2220 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2221 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2222 }
2223 if (skip_call) {
2224 return;
2225 }
2226 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2227}
2228
2229VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2230 const VkClearColorValue *pColor, uint32_t rangeCount,
2231 const VkImageSubresourceRange *pRanges) {
2232 bool skip_call = false;
2233 {
2234 std::lock_guard<std::mutex> lock(global_lock);
2235 skip_call |=
2236 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2237 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2238 }
2239 if (skip_call) {
2240 return;
2241 }
2242 get_dispatch_table(ot_device_table_map, commandBuffer)
2243 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2244}
2245
2246VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2247 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2248 const VkImageSubresourceRange *pRanges) {
2249 bool skip_call = false;
2250 {
2251 std::lock_guard<std::mutex> lock(global_lock);
2252 skip_call |=
2253 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2254 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2255 }
2256 if (skip_call) {
2257 return;
2258 }
2259 get_dispatch_table(ot_device_table_map, commandBuffer)
2260 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2261}
2262
2263VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2264 const VkClearAttachment *pAttachments, uint32_t rectCount,
2265 const VkClearRect *pRects) {
2266 bool skip_call = false;
2267 {
2268 std::lock_guard<std::mutex> lock(global_lock);
2269 skip_call |=
2270 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2271 }
2272 if (skip_call) {
2273 return;
2274 }
2275 get_dispatch_table(ot_device_table_map, commandBuffer)
2276 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2277}
2278
2279VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2280 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2281 const VkImageResolve *pRegions) {
2282 bool skip_call = false;
2283 {
2284 std::lock_guard<std::mutex> lock(global_lock);
2285 skip_call |=
2286 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2287 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2288 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2289 }
2290 if (skip_call) {
2291 return;
2292 }
2293 get_dispatch_table(ot_device_table_map, commandBuffer)
2294 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2295}
2296
2297VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2298 bool skip_call = false;
2299 {
2300 std::lock_guard<std::mutex> lock(global_lock);
2301 skip_call |=
2302 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2303 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2304 }
2305 if (skip_call) {
2306 return;
2307 }
2308 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2309}
2310
2311VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2312 bool skip_call = false;
2313 {
2314 std::lock_guard<std::mutex> lock(global_lock);
2315 skip_call |=
2316 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2317 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2318 }
2319 if (skip_call) {
2320 return;
2321 }
2322 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2323}
2324
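// CmdWaitEvents validates every event in pEvents as well as the buffers and images named in the buffer and image
// memory barriers.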
2325VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2326 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2327 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2328 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2329 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2330 bool skip_call = false;
2331 {
2332 std::lock_guard<std::mutex> lock(global_lock);
2333 skip_call |=
2334 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2335 if (pBufferMemoryBarriers) {
2336 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2337 if (pBufferMemoryBarriers[idx0].buffer) {
2338 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2339 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2340 }
2341 }
2342 }
2343 if (pEvents) {
2344 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2345 skip_call |=
2346 ValidateNonDispatchableObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2347 }
2348 }
2349 if (pImageMemoryBarriers) {
2350 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2351 if (pImageMemoryBarriers[idx2].image) {
2352 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2353 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2354 }
2355 }
2356 }
2357 }
2358 if (skip_call) {
2359 return;
2360 }
2361 get_dispatch_table(ot_device_table_map, commandBuffer)
2362 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2363 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2364}
2365
2366VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2367 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2368 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2369 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2370 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2371 bool skip_call = false;
2372 {
2373 std::lock_guard<std::mutex> lock(global_lock);
2374 skip_call |=
2375 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2376 if (pBufferMemoryBarriers) {
2377 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2378 if (pBufferMemoryBarriers[idx0].buffer) {
2379 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2380 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2381 }
2382 }
2383 }
2384 if (pImageMemoryBarriers) {
2385 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2386 if (pImageMemoryBarriers[idx1].image) {
2387 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2388 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2389 }
2390 }
2391 }
2392 }
2393 if (skip_call) {
2394 return;
2395 }
2396 get_dispatch_table(ot_device_table_map, commandBuffer)
2397 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2398 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2399}
2400
2401VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2402 VkQueryControlFlags flags) {
2403 bool skip_call = false;
2404 {
2405 std::lock_guard<std::mutex> lock(global_lock);
2406 skip_call |=
2407 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2408 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2409 }
2410 if (skip_call) {
2411 return;
2412 }
2413 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2414}
2415
2416VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2417 bool skip_call = false;
2418 {
2419 std::lock_guard<std::mutex> lock(global_lock);
2420 skip_call |=
2421 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2422 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2423 }
2424 if (skip_call) {
2425 return;
2426 }
2427 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2428}
2429
2430VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2431 uint32_t queryCount) {
2432 bool skip_call = false;
2433 {
2434 std::lock_guard<std::mutex> lock(global_lock);
2435 skip_call |=
2436 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2437 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2438 }
2439 if (skip_call) {
2440 return;
2441 }
2442 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2443}
2444
2445VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2446 VkQueryPool queryPool, uint32_t query) {
2447 bool skip_call = false;
2448 {
2449 std::lock_guard<std::mutex> lock(global_lock);
2450 skip_call |=
2451 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2452 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2453 }
2454 if (skip_call) {
2455 return;
2456 }
2457 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2458}
2459
2460VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2461 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2462 VkDeviceSize stride, VkQueryResultFlags flags) {
2463 bool skip_call = false;
2464 {
2465 std::lock_guard<std::mutex> lock(global_lock);
2466 skip_call |=
2467 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2468 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2469 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2470 }
2471 if (skip_call) {
2472 return;
2473 }
2474 get_dispatch_table(ot_device_table_map, commandBuffer)
2475 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2476}
2477
2478VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2479 uint32_t offset, uint32_t size, const void *pValues) {
2480 bool skip_call = false;
2481 {
2482 std::lock_guard<std::mutex> lock(global_lock);
2483 skip_call |=
2484 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2485 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
2486 }
2487 if (skip_call) {
2488 return;
2489 }
2490 get_dispatch_table(ot_device_table_map, commandBuffer)
2491 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2492}
2493
2494VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2495 VkSubpassContents contents) {
2496 bool skip_call = false;
2497 {
2498 std::lock_guard<std::mutex> lock(global_lock);
2499 skip_call |=
2500 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2501 if (pRenderPassBegin) {
2502 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->framebuffer,
2503 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2504 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->renderPass,
2505 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2506 }
2507 }
2508 if (skip_call) {
2509 return;
2510 }
2511 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2512}
2513
2514VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2515 bool skip_call = false;
2516 {
2517 std::lock_guard<std::mutex> lock(global_lock);
2518 skip_call |=
2519 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2520 }
2521 if (skip_call) {
2522 return;
2523 }
2524 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2525}
2526
2527VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2528 bool skip_call = false;
2529 {
2530 std::lock_guard<std::mutex> lock(global_lock);
2531 skip_call |=
2532 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2533 }
2534 if (skip_call) {
2535 return;
2536 }
2537 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2538}
2539
2540VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2541 const VkCommandBuffer *pCommandBuffers) {
2542 bool skip_call = false;
2543 {
2544 std::lock_guard<std::mutex> lock(global_lock);
2545 skip_call |=
2546 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2547 if (pCommandBuffers) {
2548 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2549 skip_call |= ValidateDispatchableObject(commandBuffer, pCommandBuffers[idx0],
2550 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2551 }
2552 }
2553 }
2554 if (skip_call) {
2555 return;
2556 }
2557 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2558}
2559
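// WSI (window system integration) entry points: surfaces, swapchains, and present. Each wrapper
// validates the handles it receives under global_lock, then forwards the call down the layer chain.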
2560VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2561 bool skip_call = false;
2562 {
2563 std::lock_guard<std::mutex> lock(global_lock);
2564 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2565 skip_call |= ValidateNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2566 }
2567 if (skip_call) {
2568 return;
2569 }
2570 {
2571 std::lock_guard<std::mutex> lock(global_lock);
2572 DestroyNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2573 }
2574 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2575}
2576
2577VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2578 VkSurfaceKHR surface, VkBool32 *pSupported) {
2579 bool skip_call = false;
2580 {
2581 std::lock_guard<std::mutex> lock(global_lock);
2582 skip_call |=
2583 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2584 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2585 }
2586 if (skip_call) {
2587 return VK_ERROR_VALIDATION_FAILED_EXT;
2588 }
2589 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2590 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2591 return result;
2592}
2593
2594VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2595 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2596 bool skip_call = false;
2597 {
2598 std::lock_guard<std::mutex> lock(global_lock);
2599 skip_call |=
2600 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2601 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2602 }
2603 if (skip_call) {
2604 return VK_ERROR_VALIDATION_FAILED_EXT;
2605 }
2606 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2607 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2608 return result;
2609}
2610
2611VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2612 uint32_t *pSurfaceFormatCount,
2613 VkSurfaceFormatKHR *pSurfaceFormats) {
2614 bool skip_call = false;
2615 {
2616 std::lock_guard<std::mutex> lock(global_lock);
2617 skip_call |=
2618 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2619 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2620 }
2621 if (skip_call) {
2622 return VK_ERROR_VALIDATION_FAILED_EXT;
2623 }
2624 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2625 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2626 return result;
2627}
2628
2629VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2630 uint32_t *pPresentModeCount,
2631 VkPresentModeKHR *pPresentModes) {
2632 bool skip_call = false;
2633 {
2634 std::lock_guard<std::mutex> lock(global_lock);
2635 skip_call |=
2636 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2637 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2638 }
2639 if (skip_call) {
2640 return VK_ERROR_VALIDATION_FAILED_EXT;
2641 }
2642 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2643 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2644 return result;
2645}
2646
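// Note that oldSwapchain may legitimately be VK_NULL_HANDLE, so it is validated with the trailing
// flag set (a null handle is accepted), while the surface is looked up through the physical device
// recorded at vkCreateDevice time, since surfaces are instance-level objects rather than children of
// the device.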
2647VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2648 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2649 bool skip_call = false;
2650 {
2651 std::lock_guard<std::mutex> lock(global_lock);
2652 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2653 if (pCreateInfo) {
2654 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->oldSwapchain,
2655 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2656 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2657 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfo->surface,
2658 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2659 }
2660 }
2661 if (skip_call) {
2662 return VK_ERROR_VALIDATION_FAILED_EXT;
2663 }
2664 VkResult result =
2665 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2666 {
2667 std::lock_guard<std::mutex> lock(global_lock);
2668 if (result == VK_SUCCESS) {
2669 CreateNonDispatchableObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
2670 }
2671 }
2672 return result;
2673}
2674
2675VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2676 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2677 bool skip_call = false;
2678 {
2679 std::lock_guard<std::mutex> lock(global_lock);
2680 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2681 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2682 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2683 skip_call |= ValidateNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2684 }
2685 if (skip_call) {
2686 return VK_ERROR_VALIDATION_FAILED_EXT;
2687 }
2688 VkResult result = get_dispatch_table(ot_device_table_map, device)
2689 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2690 return result;
2691}
2692
2693VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2694 bool skip_call = false;
2695 {
2696 std::lock_guard<std::mutex> lock(global_lock);
2697 if (pPresentInfo) {
2698 if (pPresentInfo->pSwapchains) {
2699 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2700 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pSwapchains[idx0],
2701 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2702 }
2703 }
2704 if (pPresentInfo->pWaitSemaphores) {
2705 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2706 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2707 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2708 }
2709 }
2710 }
2711 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2712 }
2713 if (skip_call) {
2714 return VK_ERROR_VALIDATION_FAILED_EXT;
2715 }
2716 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2717 return result;
2718}
2719
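// Platform-specific surface creation entry points. Each block compiles only when the corresponding
// VK_USE_PLATFORM_*_KHR macro is defined; every successfully created surface is tracked as a
// VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT object.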
2720#ifdef VK_USE_PLATFORM_WIN32_KHR
2721VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2722 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2723 bool skip_call = false;
2724 {
2725 std::lock_guard<std::mutex> lock(global_lock);
2726 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2727 }
2728 if (skip_call) {
2729 return VK_ERROR_VALIDATION_FAILED_EXT;
2730 }
2731 VkResult result =
2732 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2733 {
2734 std::lock_guard<std::mutex> lock(global_lock);
2735 if (result == VK_SUCCESS) {
2736 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2737 }
2738 }
2739 return result;
2740}
2741
2742VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2743 uint32_t queueFamilyIndex) {
2744 bool skip_call = false;
2745 {
2746 std::lock_guard<std::mutex> lock(global_lock);
2747 skip_call |=
2748 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2749 }
2750 if (skip_call) {
2751 return VK_FALSE;
2752 }
2753 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2754 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2755 return result;
2756}
2757#endif // VK_USE_PLATFORM_WIN32_KHR
2758
2759#ifdef VK_USE_PLATFORM_XCB_KHR
2760VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2761 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2762 bool skip_call = false;
2763 {
2764 std::lock_guard<std::mutex> lock(global_lock);
2765 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2766 }
2767 if (skip_call) {
2768 return VK_ERROR_VALIDATION_FAILED_EXT;
2769 }
2770 VkResult result =
2771 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2772 {
2773 std::lock_guard<std::mutex> lock(global_lock);
2774 if (result == VK_SUCCESS) {
2775 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2776 }
2777 }
2778 return result;
2779}
2780
2781VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2782 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2783 xcb_visualid_t visual_id) {
2784 bool skip_call = false;
2785 {
2786 std::lock_guard<std::mutex> lock(global_lock);
2787 skip_call |=
2788 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2789 }
2790 if (skip_call) {
2791 return VK_FALSE;
2792 }
2793 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2794 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2795 return result;
2796}
2797#endif // VK_USE_PLATFORM_XCB_KHR
2798
2799#ifdef VK_USE_PLATFORM_XLIB_KHR
2800VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2801 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2802 bool skip_call = false;
2803 {
2804 std::lock_guard<std::mutex> lock(global_lock);
2805 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2806 }
2807 if (skip_call) {
2808 return VK_ERROR_VALIDATION_FAILED_EXT;
2809 }
2810 VkResult result =
2811 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2812 {
2813 std::lock_guard<std::mutex> lock(global_lock);
2814 if (result == VK_SUCCESS) {
2815 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2816 }
2817 }
2818 return result;
2819}
2820
2821VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2822 uint32_t queueFamilyIndex, Display *dpy,
2823 VisualID visualID) {
2824 bool skip_call = false;
2825 {
2826 std::lock_guard<std::mutex> lock(global_lock);
2827 skip_call |=
2828 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2829 }
2830 if (skip_call) {
2831 return VK_FALSE;
2832 }
2833 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2834 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2835 return result;
2836}
2837#endif // VK_USE_PLATFORM_XLIB_KHR
2838
2839#ifdef VK_USE_PLATFORM_MIR_KHR
2840VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2841 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2842 bool skip_call = false;
2843 {
2844 std::lock_guard<std::mutex> lock(global_lock);
2845 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2846 }
2847 if (skip_call) {
2848 return VK_ERROR_VALIDATION_FAILED_EXT;
2849 }
2850 VkResult result =
2851 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2852 {
2853 std::lock_guard<std::mutex> lock(global_lock);
2854 if (result == VK_SUCCESS) {
2855 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2856 }
2857 }
2858 return result;
2859}
2860
2861VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2862 uint32_t queueFamilyIndex, MirConnection *connection) {
2863 bool skip_call = false;
2864 {
2865 std::lock_guard<std::mutex> lock(global_lock);
2866 skip_call |=
2867 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2868 }
2869 if (skip_call) {
2870 return VK_FALSE;
2871 }
2872 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2873 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2874 return result;
2875}
2876#endif // VK_USE_PLATFORM_MIR_KHR
2877
2878#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2879VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2880 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2881 bool skip_call = false;
2882 {
2883 std::lock_guard<std::mutex> lock(global_lock);
2884 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2885 }
2886 if (skip_call) {
2887 return VK_ERROR_VALIDATION_FAILED_EXT;
2888 }
2889 VkResult result =
2890 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2891 {
2892 std::lock_guard<std::mutex> lock(global_lock);
2893 if (result == VK_SUCCESS) {
2894 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2895 }
2896 }
2897 return result;
2898}
2899
2900VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2901 uint32_t queueFamilyIndex,
2902 struct wl_display *display) {
2903 bool skip_call = false;
2904 {
2905 std::lock_guard<std::mutex> lock(global_lock);
2906 skip_call |=
2907 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2908 }
2909 if (skip_call) {
2910 return VK_FALSE;
2911 }
2912 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2913 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2914 return result;
2915}
2916#endif // VK_USE_PLATFORM_WAYLAND_KHR
2917
2918#ifdef VK_USE_PLATFORM_ANDROID_KHR
2919VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2920 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2921 bool skip_call = false;
2922 {
2923 std::lock_guard<std::mutex> lock(global_lock);
2924 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2925 }
2926 if (skip_call) {
2927 return VK_ERROR_VALIDATION_FAILED_EXT;
2928 }
2929 VkResult result =
2930 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2931 {
2932 std::lock_guard<std::mutex> lock(global_lock);
2933 if (result == VK_SUCCESS) {
2934 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
2935 }
2936 }
2937 return result;
2938}
2939#endif // VK_USE_PLATFORM_ANDROID_KHR
2940
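// Debug report entry points: forward to the next layer, then register (or unregister) the callback
// with this layer's own report_data so object_tracker messages reach it as well. The callback handle
// itself is tracked like any other non-dispatchable object.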
2941VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2942 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2943 const VkAllocationCallbacks *pAllocator,
2944 VkDebugReportCallbackEXT *pCallback) {
2945 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2946 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
2947 if (VK_SUCCESS == result) {
2948 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2949 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
2950 CreateNonDispatchableObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
2951 }
2952 return result;
2953}
2954
2955VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
2956 const VkAllocationCallbacks *pAllocator) {
2957 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2958 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
2959 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2960 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
2961 DestroyNonDispatchableObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
2962}
2963
2964VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
2965 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
2966 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
2967 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2968 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
2969}
2970
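// Layer self-description returned by the vkEnumerate*LayerProperties / vkEnumerate*ExtensionProperties
// entry points below. Illustrative application-side usage (not part of this layer):
//     const char *layers[] = {"VK_LAYER_LUNARG_object_tracker"};
//     instance_create_info.enabledLayerCount = 1;
//     instance_create_info.ppEnabledLayerNames = layers;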
2971static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
2972
2973static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
2974 VK_LAYER_API_VERSION, // specVersion
2975 1, // implementationVersion
2976 "LunarG Validation Layer"};
2977
2978VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
2979 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2980}
2981
2982VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
2983 VkLayerProperties *pProperties) {
2984 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2985}
2986
2987VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
2988 VkExtensionProperties *pProperties) {
2989 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
2990 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
2991
2992 return VK_ERROR_LAYER_NOT_PRESENT;
2993}
2994
2995VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
2996 uint32_t *pCount, VkExtensionProperties *pProperties) {
2997 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
2998 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
2999
3000 assert(physicalDevice);
3001 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3002 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3003}
3004
3005static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3006 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3007 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3008}
3009
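// Resolve WSI entry points at vkGetInstanceProcAddr time. A pointer is handed out only when the
// corresponding surface extension was enabled on the instance (tracked in instanceExtMap).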
3010static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
3011 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
3012 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
3013 return nullptr;
3014
3015 if (!strcmp("vkDestroySurfaceKHR", name))
3016 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
3017 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
3018 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
3019 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
3020 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
3021 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
3022 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
3023 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
3024 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);
3025
3026#ifdef VK_USE_PLATFORM_WIN32_KHR
3027 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
3028 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
3029 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
3030 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
3031#endif // VK_USE_PLATFORM_WIN32_KHR
3032#ifdef VK_USE_PLATFORM_XCB_KHR
3033 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
3034 return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
3035 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
3036 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
3037#endif // VK_USE_PLATFORM_XCB_KHR
3038#ifdef VK_USE_PLATFORM_XLIB_KHR
3039 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
3040 return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
3041 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
3042 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
3043#endif // VK_USE_PLATFORM_XLIB_KHR
3044#ifdef VK_USE_PLATFORM_MIR_KHR
3045 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
3046 return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
3047 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
3048 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
3049#endif // VK_USE_PLATFORM_MIR_KHR
3050#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3051 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
3052 return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
3053 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
3054 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
3055#endif // VK_USE_PLATFORM_WAYLAND_KHR
3056#ifdef VK_USE_PLATFORM_ANDROID_KHR
3057 if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
3058 return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
3059#endif // VK_USE_PLATFORM_ANDROID_KHR
3060
3061 return nullptr;
3062}
3063
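// Record which device extensions were enabled. "OBJTRACK_EXTENSIONS" is a layer-private
// pseudo-extension name rather than a registered Vulkan extension.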
3064static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3065 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3066 device_data->wsi_enabled = false;
3067
3068 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3069 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3070 device_data->wsi_enabled = true;
3071 }
3072 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3073 device_data->objtrack_extensions_enabled = true;
3074 }
3075 }
3076}
3077
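// Record which instance-level surface extensions were enabled; InterceptWsiEnabledCommand consults
// these flags before handing out the corresponding WSI entry points.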
3078static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3079 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3080
3082 instanceExtMap[pDisp] = {};
3083
3084 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3085 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3086 instanceExtMap[pDisp].wsi_enabled = true;
3087 }
3088#ifdef VK_USE_PLATFORM_XLIB_KHR
3089 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3090 instanceExtMap[pDisp].xlib_enabled = true;
3091 }
3092#endif
3093#ifdef VK_USE_PLATFORM_XCB_KHR
3094 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3095 instanceExtMap[pDisp].xcb_enabled = true;
3096 }
3097#endif
3098#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3099 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3100 instanceExtMap[pDisp].wayland_enabled = true;
3101 }
3102#endif
3103#ifdef VK_USE_PLATFORM_MIR_KHR
3104 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3105 instanceExtMap[pDisp].mir_enabled = true;
3106 }
3107#endif
3108#ifdef VK_USE_PLATFORM_ANDROID_KHR
3109 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3110 instanceExtMap[pDisp].android_enabled = true;
3111 }
3112#endif
3113#ifdef VK_USE_PLATFORM_WIN32_KHR
3114 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3115 instanceExtMap[pDisp].win32_enabled = true;
3116 }
3117#endif
3118 }
3119}
3120
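// Standard layer device creation: fetch the next layer's vkCreateDevice from the chain info, advance
// the chain, create the device, then build this layer's device dispatch table and begin tracking the
// new VkDevice (keeping a link back to its physical device for later surface validation).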
3121VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
3122 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
3123 std::lock_guard<std::mutex> lock(global_lock);
3124 layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3125 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3126
3127 assert(chain_info->u.pLayerInfo);
3128 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3129 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
3130 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
3131 if (fpCreateDevice == NULL) {
3132 return VK_ERROR_INITIALIZATION_FAILED;
3133 }
3134
3135 // Advance the link info for the next element on the chain
3136 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3137
3138 VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
3139 if (result != VK_SUCCESS) {
3140 return result;
3141 }
3142
3143 layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
3144 device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);
3145
3146 // Add link back to physDev
3147 device_data->physical_device = physicalDevice;
3148
3149 initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);
3150
3151 CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
3152 CreateDispatchableObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT);
3153
3154 return result;
3155}
3156
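// Cache the reported queue family properties so later queue validation (e.g. ValidateQueueFlags for
// sparse binding) can check queue capabilities.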
3157VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3158 uint32_t *pQueueFamilyPropertyCount,
3159 VkQueueFamilyProperties *pQueueFamilyProperties) {
3160 get_dispatch_table(ot_instance_table_map, physicalDevice)
3161 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3162 std::lock_guard<std::mutex> lock(global_lock);
3163 if (pQueueFamilyProperties != NULL) {
3164 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3165 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3166 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3167 }
3168 }
3169}
3170
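// Standard layer instance creation: chain to the next vkCreateInstance, build this layer's instance
// dispatch table, copy any debug report create infos from pNext for use during vkDestroyInstance,
// set up debug reporting, and begin tracking the new VkInstance.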
3171VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
3172 VkInstance *pInstance) {
3173 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3174
3175 assert(chain_info->u.pLayerInfo);
3176 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3177 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
3178 if (fpCreateInstance == NULL) {
3179 return VK_ERROR_INITIALIZATION_FAILED;
3180 }
3181
3182 // Advance the link info for the next element on the chain
3183 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3184
3185 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
3186 if (result != VK_SUCCESS) {
3187 return result;
3188 }
3189
3190 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
3191 instance_data->instance = *pInstance;
3192 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
3193 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);
3194
3195 // Look for one or more debug report create info structures, and copy the
3196 // callback(s) for each one found (for use by vkDestroyInstance)
3197 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
3198 &instance_data->tmp_callbacks);
3199
3200 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
3201 pCreateInfo->ppEnabledExtensionNames);
3202
3203 InitObjectTracker(instance_data, pAllocator);
3204 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);
3205
3206 CreateDispatchableObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);
3207
3208 return result;
3209}
3210
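// Physical devices are owned by the instance; each handle the call returns is registered here so
// later uses of it can be validated.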
3211VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3212 VkPhysicalDevice *pPhysicalDevices) {
3213 bool skip_call = false;
3214 std::unique_lock<std::mutex> lock(global_lock);
3215 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3216 lock.unlock();
3217 if (skip_call) {
3218 return VK_ERROR_VALIDATION_FAILED_EXT;
3219 }
3220 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3221 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3222 lock.lock();
3223 if (result == VK_SUCCESS) {
3224 if (pPhysicalDevices) {
3225 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3226 CreateDispatchableObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT);
3227 }
3228 }
3229 }
3230 lock.unlock();
3231 return result;
3232}
3233
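// Queues are not created explicitly by the application, so each one is registered when it is
// retrieved, along with its queue family index.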
3234VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3235 std::unique_lock<std::mutex> lock(global_lock);
3236 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3237 lock.unlock();
3238
3239 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3240
3241 lock.lock();
3242
3243 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3244 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3245}
3246
3247VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3248 std::unique_lock<std::mutex> lock(global_lock);
3249 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3250 lock.unlock();
3251
3252 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3253
3254 lock.lock();
3255 DestroyNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
3256}
3257
3258VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3259 VkMemoryMapFlags flags, void **ppData) {
3260 bool skip_call = false;
3261 std::unique_lock<std::mutex> lock(global_lock);
3262 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3263 lock.unlock();
3264 if (skip_call) {
3265 return VK_ERROR_VALIDATION_FAILED_EXT;
3266 }
3267 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3268 return result;
3269}
3270
3271VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3272 bool skip_call = false;
3273 std::unique_lock<std::mutex> lock(global_lock);
3274 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3275 lock.unlock();
3276 if (skip_call) {
3277 return;
3278 }
3279
3280 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3281}

3282VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3283 VkFence fence) {
3284 std::unique_lock<std::mutex> lock(global_lock);
3285 ValidateQueueFlags(queue, "QueueBindSparse");
3286
3287 for (uint32_t i = 0; i < bindInfoCount; i++) {
3288 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
3289 ValidateNonDispatchableObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3290 false);
3291 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
3292 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3293 false);
3294 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
3295 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
3296 }
3297 lock.unlock();
3298
3299 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3300 return result;
3301}
3302
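// Command buffers are tracked together with their owning pool and level so that FreeCommandBuffers
// and vkDestroyCommandPool can later verify pool/buffer pairings.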
3303VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3304 VkCommandBuffer *pCommandBuffers) {
3305 bool skip_call = false;
3306 std::unique_lock<std::mutex> lock(global_lock);
3307 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3308 skip_call |=
3309 ValidateNonDispatchableObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3310 lock.unlock();
3311
3312 if (skip_call) {
3313 return VK_ERROR_VALIDATION_FAILED_EXT;
3314 }
3315
3316 VkResult result =
3317 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3318
3319 lock.lock();
3320 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3321 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3322 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3323 }
3324 lock.unlock();
3325
3326 return result;
3327}
3328
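// Descriptor sets are tracked together with their owning pool so that FreeDescriptorSets and
// vkDestroyDescriptorPool can later verify pool/set pairings; tracking happens only on success.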
3329VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3330 VkDescriptorSet *pDescriptorSets) {
3331 bool skip_call = false;
3332 std::unique_lock<std::mutex> lock(global_lock);
3333 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3334 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->descriptorPool,
3335 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3336 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3337 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->pSetLayouts[i],
3338 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3339 }
3340 lock.unlock();
3341 if (skip_call) {
3342 return VK_ERROR_VALIDATION_FAILED_EXT;
3343 }
3344
3345 VkResult result =
3346 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3347
3348 if (VK_SUCCESS == result) {
3349 lock.lock();
3350 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3351 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3352 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3353 }
3354 lock.unlock();
3355 }
3356
3357 return result;
3358}
3359
3360VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3361 const VkCommandBuffer *pCommandBuffers) {
3362 bool skip_call = false;
3363 std::unique_lock<std::mutex> lock(global_lock);
3364 ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3365 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3366 for (uint32_t i = 0; i < commandBufferCount; i++) {
3367 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3368 }
3369
3370 lock.unlock();
3371 if (!skip_call) {
3372 get_dispatch_table(ot_device_table_map, device)
3373 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3374 }
3375
3376 lock.lock();
3377 for (uint32_t i = 0; i < commandBufferCount; i++) {
3378 DestroyDispatchableObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
3379 }
3380}

3381VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3382 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3383 std::unique_lock<std::mutex> lock(global_lock);
3384 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3385 // Remove this swapchain's images from our map of such images.
3386 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3387 while (itr != device_data->swapchainImageMap.end()) {
3388 OBJTRACK_NODE *pNode = (*itr).second;
3389 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3390 delete pNode;
3391 auto delete_item = itr++;
3392 device_data->swapchainImageMap.erase(delete_item);
3393 } else {
3394 ++itr;
3395 }
3396 }
3397 DestroyNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
3398 lock.unlock();
3399
3400 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3401}
3402
3403VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3404 const VkDescriptorSet *pDescriptorSets) {
3405 bool skip_call = false;
3406 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3407 std::unique_lock<std::mutex> lock(global_lock);
3408 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3409 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3410 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3411 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3412 }
3413
3414 lock.unlock();
3415 if (!skip_call) {
3416 result = get_dispatch_table(ot_device_table_map, device)
3417 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3418 }
3419
3420 lock.lock();
3421 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3422 DestroyNonDispatchableObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3423 }
3424 return result;
3425}
3426
3427VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3428 const VkAllocationCallbacks *pAllocator) {
3429 bool skip_call = false;
3430 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3431 std::unique_lock<std::mutex> lock(global_lock);
3432 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3433 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3434 lock.unlock();
3435 if (skip_call) {
3436 return;
3437 }
3438 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
3439 // Remove this pool's descriptor sets from our descriptorSet map.
3440 lock.lock();
3441 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
3442 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
3443 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
3444 OBJTRACK_NODE *pNode = (*itr).second;
3445 auto del_itr = itr++;
3446 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
3447 DestroyNonDispatchableObject(device, (VkDescriptorSet)((*del_itr).first),
3448 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3449 }
3450 }
3451 DestroyNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
3452 lock.unlock();
3453 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
3454}
3455
3456VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
3457 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3458 bool skip_call = false;
3459 std::unique_lock<std::mutex> lock(global_lock);
3460 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3461 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3462 lock.unlock();
3463 if (skip_call) {
3464 return;
3465 }
3466 lock.lock();
3467 // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
3468 // Remove this pool's cmdBuffers from our cmd buffer map.
3469 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
3470 auto del_itr = itr;
3471 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
3472 OBJTRACK_NODE *pNode = (*itr).second;
3473 del_itr = itr++;
3474 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
3475 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
3476 DestroyDispatchableObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
3477 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
3478 }
3479 }
3480 DestroyNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
3481 lock.unlock();
3482 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
3483}
3484
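// Swapchain images are created by the implementation rather than through vkCreateImage, so they are
// tracked in a separate swapchainImageMap that records the owning swapchain as each image's parent.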
3485VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
3486 VkImage *pSwapchainImages) {
3487 bool skip_call = false;
3488 std::unique_lock<std::mutex> lock(global_lock);
3489 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3490 lock.unlock();
3491 if (skip_call) {
3492 return VK_ERROR_VALIDATION_FAILED_EXT;
3493 }
3494 VkResult result = get_dispatch_table(ot_device_table_map, device)
3495 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
3496 if (pSwapchainImages != NULL) {
3497 lock.lock();
3498 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
3499 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
3500 }
3501 lock.unlock();
3502 }
3503 return result;
3504}
3505
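// Validates every handle referenced by each VkGraphicsPipelineCreateInfo (optional base pipeline,
// layout, shader modules, render pass) plus the optional pipeline cache before forwarding; the same
// pattern is used for compute pipelines below.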
3506VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3507 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3508 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3509 bool skip_call = false;
3510 std::unique_lock<std::mutex> lock(global_lock);
3511 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3512 if (pCreateInfos) {
3513 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3514 if (pCreateInfos[idx0].basePipelineHandle) {
3515 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3516 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3517 }
3518 if (pCreateInfos[idx0].layout) {
3519 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3520 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3521 }
3522 if (pCreateInfos[idx0].pStages) {
3523 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
3524 if (pCreateInfos[idx0].pStages[idx1].module) {
3525 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].pStages[idx1].module,
3526 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3527 }
3528 }
3529 }
3530 if (pCreateInfos[idx0].renderPass) {
3531 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].renderPass,
3532 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
3533 }
3534 }
3535 }
3536 if (pipelineCache) {
3537 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3538 }
3539 lock.unlock();
3540 if (skip_call) {
3541 return VK_ERROR_VALIDATION_FAILED_EXT;
3542 }
3543 VkResult result = get_dispatch_table(ot_device_table_map, device)
3544 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3545 lock.lock();
3546 if (result == VK_SUCCESS) {
3547 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
3548 CreateNonDispatchableObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
3549 }
3550 }
3551 lock.unlock();
3552 return result;
3553}
3554
3555VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3556 const VkComputePipelineCreateInfo *pCreateInfos,
3557 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3558 bool skip_call = false;
3559 std::unique_lock<std::mutex> lock(global_lock);
3560 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3561 if (pCreateInfos) {
3562 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3563 if (pCreateInfos[idx0].basePipelineHandle) {
3564 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3565 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3566 }
3567 if (pCreateInfos[idx0].layout) {
3568 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3569 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3570 }
3571 if (pCreateInfos[idx0].stage.module) {
3572 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].stage.module,
3573 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3574 }
3575 }
3576 }
3577 if (pipelineCache) {
3578 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3579 }
3580 lock.unlock();
3581 if (skip_call) {
3582 return VK_ERROR_VALIDATION_FAILED_EXT;
3583 }
3584 VkResult result = get_dispatch_table(ot_device_table_map, device)
3585 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3586 lock.lock();
3587 if (result == VK_SUCCESS) {
3588 for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
3589 CreateNonDispatchableObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
3590 }
3591 }
3592 lock.unlock();
3593 return result;
3594}
3595
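// Map core device entry point names to this layer's intercepts. The leading "vk" is stripped before
// comparison, which is why the string literals below omit it.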
static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "GetDeviceProcAddr"))
        return (PFN_vkVoidFunction)GetDeviceProcAddr;
    if (!strcmp(name, "DestroyDevice"))
        return (PFN_vkVoidFunction)DestroyDevice;
    if (!strcmp(name, "GetDeviceQueue"))
        return (PFN_vkVoidFunction)GetDeviceQueue;
    if (!strcmp(name, "QueueSubmit"))
        return (PFN_vkVoidFunction)QueueSubmit;
    if (!strcmp(name, "QueueWaitIdle"))
        return (PFN_vkVoidFunction)QueueWaitIdle;
    if (!strcmp(name, "DeviceWaitIdle"))
        return (PFN_vkVoidFunction)DeviceWaitIdle;
    if (!strcmp(name, "AllocateMemory"))
        return (PFN_vkVoidFunction)AllocateMemory;
    if (!strcmp(name, "FreeMemory"))
        return (PFN_vkVoidFunction)FreeMemory;
    if (!strcmp(name, "MapMemory"))
        return (PFN_vkVoidFunction)MapMemory;
    if (!strcmp(name, "UnmapMemory"))
        return (PFN_vkVoidFunction)UnmapMemory;
    if (!strcmp(name, "FlushMappedMemoryRanges"))
        return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
    if (!strcmp(name, "InvalidateMappedMemoryRanges"))
        return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
    if (!strcmp(name, "GetDeviceMemoryCommitment"))
        return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
    if (!strcmp(name, "BindBufferMemory"))
        return (PFN_vkVoidFunction)BindBufferMemory;
    if (!strcmp(name, "BindImageMemory"))
        return (PFN_vkVoidFunction)BindImageMemory;
    if (!strcmp(name, "GetBufferMemoryRequirements"))
        return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
    if (!strcmp(name, "GetImageMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageMemoryRequirements;
    if (!strcmp(name, "GetImageSparseMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
    if (!strcmp(name, "QueueBindSparse"))
        return (PFN_vkVoidFunction)QueueBindSparse;
    if (!strcmp(name, "CreateFence"))
        return (PFN_vkVoidFunction)CreateFence;
    if (!strcmp(name, "DestroyFence"))
        return (PFN_vkVoidFunction)DestroyFence;
    if (!strcmp(name, "ResetFences"))
        return (PFN_vkVoidFunction)ResetFences;
    if (!strcmp(name, "GetFenceStatus"))
        return (PFN_vkVoidFunction)GetFenceStatus;
    if (!strcmp(name, "WaitForFences"))
        return (PFN_vkVoidFunction)WaitForFences;
    if (!strcmp(name, "CreateSemaphore"))
        return (PFN_vkVoidFunction)CreateSemaphore;
    if (!strcmp(name, "DestroySemaphore"))
        return (PFN_vkVoidFunction)DestroySemaphore;
    if (!strcmp(name, "CreateEvent"))
        return (PFN_vkVoidFunction)CreateEvent;
    if (!strcmp(name, "DestroyEvent"))
        return (PFN_vkVoidFunction)DestroyEvent;
    if (!strcmp(name, "GetEventStatus"))
        return (PFN_vkVoidFunction)GetEventStatus;
    if (!strcmp(name, "SetEvent"))
        return (PFN_vkVoidFunction)SetEvent;
    if (!strcmp(name, "ResetEvent"))
        return (PFN_vkVoidFunction)ResetEvent;
    if (!strcmp(name, "CreateQueryPool"))
        return (PFN_vkVoidFunction)CreateQueryPool;
    if (!strcmp(name, "DestroyQueryPool"))
        return (PFN_vkVoidFunction)DestroyQueryPool;
    if (!strcmp(name, "GetQueryPoolResults"))
        return (PFN_vkVoidFunction)GetQueryPoolResults;
    if (!strcmp(name, "CreateBuffer"))
        return (PFN_vkVoidFunction)CreateBuffer;
    if (!strcmp(name, "DestroyBuffer"))
        return (PFN_vkVoidFunction)DestroyBuffer;
    if (!strcmp(name, "CreateBufferView"))
        return (PFN_vkVoidFunction)CreateBufferView;
    if (!strcmp(name, "DestroyBufferView"))
        return (PFN_vkVoidFunction)DestroyBufferView;
    if (!strcmp(name, "CreateImage"))
        return (PFN_vkVoidFunction)CreateImage;
    if (!strcmp(name, "DestroyImage"))
        return (PFN_vkVoidFunction)DestroyImage;
    if (!strcmp(name, "GetImageSubresourceLayout"))
        return (PFN_vkVoidFunction)GetImageSubresourceLayout;
    if (!strcmp(name, "CreateImageView"))
        return (PFN_vkVoidFunction)CreateImageView;
    if (!strcmp(name, "DestroyImageView"))
        return (PFN_vkVoidFunction)DestroyImageView;
    if (!strcmp(name, "CreateShaderModule"))
        return (PFN_vkVoidFunction)CreateShaderModule;
    if (!strcmp(name, "DestroyShaderModule"))
        return (PFN_vkVoidFunction)DestroyShaderModule;
    if (!strcmp(name, "CreatePipelineCache"))
        return (PFN_vkVoidFunction)CreatePipelineCache;
    if (!strcmp(name, "DestroyPipelineCache"))
        return (PFN_vkVoidFunction)DestroyPipelineCache;
    if (!strcmp(name, "GetPipelineCacheData"))
        return (PFN_vkVoidFunction)GetPipelineCacheData;
    if (!strcmp(name, "MergePipelineCaches"))
        return (PFN_vkVoidFunction)MergePipelineCaches;
    if (!strcmp(name, "CreateGraphicsPipelines"))
        return (PFN_vkVoidFunction)CreateGraphicsPipelines;
    if (!strcmp(name, "CreateComputePipelines"))
        return (PFN_vkVoidFunction)CreateComputePipelines;
    if (!strcmp(name, "DestroyPipeline"))
        return (PFN_vkVoidFunction)DestroyPipeline;
    if (!strcmp(name, "CreatePipelineLayout"))
        return (PFN_vkVoidFunction)CreatePipelineLayout;
    if (!strcmp(name, "DestroyPipelineLayout"))
        return (PFN_vkVoidFunction)DestroyPipelineLayout;
    if (!strcmp(name, "CreateSampler"))
        return (PFN_vkVoidFunction)CreateSampler;
    if (!strcmp(name, "DestroySampler"))
        return (PFN_vkVoidFunction)DestroySampler;
    if (!strcmp(name, "CreateDescriptorSetLayout"))
        return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
    if (!strcmp(name, "DestroyDescriptorSetLayout"))
        return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
    if (!strcmp(name, "CreateDescriptorPool"))
        return (PFN_vkVoidFunction)CreateDescriptorPool;
    if (!strcmp(name, "DestroyDescriptorPool"))
        return (PFN_vkVoidFunction)DestroyDescriptorPool;
    if (!strcmp(name, "ResetDescriptorPool"))
        return (PFN_vkVoidFunction)ResetDescriptorPool;
    if (!strcmp(name, "AllocateDescriptorSets"))
        return (PFN_vkVoidFunction)AllocateDescriptorSets;
    if (!strcmp(name, "FreeDescriptorSets"))
        return (PFN_vkVoidFunction)FreeDescriptorSets;
    if (!strcmp(name, "UpdateDescriptorSets"))
        return (PFN_vkVoidFunction)UpdateDescriptorSets;
    if (!strcmp(name, "CreateFramebuffer"))
        return (PFN_vkVoidFunction)CreateFramebuffer;
    if (!strcmp(name, "DestroyFramebuffer"))
        return (PFN_vkVoidFunction)DestroyFramebuffer;
    if (!strcmp(name, "CreateRenderPass"))
        return (PFN_vkVoidFunction)CreateRenderPass;
    if (!strcmp(name, "DestroyRenderPass"))
        return (PFN_vkVoidFunction)DestroyRenderPass;
    if (!strcmp(name, "GetRenderAreaGranularity"))
        return (PFN_vkVoidFunction)GetRenderAreaGranularity;
    if (!strcmp(name, "CreateCommandPool"))
        return (PFN_vkVoidFunction)CreateCommandPool;
    if (!strcmp(name, "DestroyCommandPool"))
        return (PFN_vkVoidFunction)DestroyCommandPool;
    if (!strcmp(name, "ResetCommandPool"))
        return (PFN_vkVoidFunction)ResetCommandPool;
    if (!strcmp(name, "AllocateCommandBuffers"))
        return (PFN_vkVoidFunction)AllocateCommandBuffers;
    if (!strcmp(name, "FreeCommandBuffers"))
        return (PFN_vkVoidFunction)FreeCommandBuffers;
    if (!strcmp(name, "BeginCommandBuffer"))
        return (PFN_vkVoidFunction)BeginCommandBuffer;
    if (!strcmp(name, "EndCommandBuffer"))
        return (PFN_vkVoidFunction)EndCommandBuffer;
    if (!strcmp(name, "ResetCommandBuffer"))
        return (PFN_vkVoidFunction)ResetCommandBuffer;
    if (!strcmp(name, "CmdBindPipeline"))
        return (PFN_vkVoidFunction)CmdBindPipeline;
    if (!strcmp(name, "CmdSetViewport"))
        return (PFN_vkVoidFunction)CmdSetViewport;
    if (!strcmp(name, "CmdSetScissor"))
        return (PFN_vkVoidFunction)CmdSetScissor;
    if (!strcmp(name, "CmdSetLineWidth"))
        return (PFN_vkVoidFunction)CmdSetLineWidth;
    if (!strcmp(name, "CmdSetDepthBias"))
        return (PFN_vkVoidFunction)CmdSetDepthBias;
    if (!strcmp(name, "CmdSetBlendConstants"))
        return (PFN_vkVoidFunction)CmdSetBlendConstants;
    if (!strcmp(name, "CmdSetDepthBounds"))
        return (PFN_vkVoidFunction)CmdSetDepthBounds;
    if (!strcmp(name, "CmdSetStencilCompareMask"))
        return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
    if (!strcmp(name, "CmdSetStencilWriteMask"))
        return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
    if (!strcmp(name, "CmdSetStencilReference"))
        return (PFN_vkVoidFunction)CmdSetStencilReference;
    if (!strcmp(name, "CmdBindDescriptorSets"))
        return (PFN_vkVoidFunction)CmdBindDescriptorSets;
    if (!strcmp(name, "CmdBindIndexBuffer"))
        return (PFN_vkVoidFunction)CmdBindIndexBuffer;
    if (!strcmp(name, "CmdBindVertexBuffers"))
        return (PFN_vkVoidFunction)CmdBindVertexBuffers;
    if (!strcmp(name, "CmdDraw"))
        return (PFN_vkVoidFunction)CmdDraw;
    if (!strcmp(name, "CmdDrawIndexed"))
        return (PFN_vkVoidFunction)CmdDrawIndexed;
    if (!strcmp(name, "CmdDrawIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndirect;
    if (!strcmp(name, "CmdDrawIndexedIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
    if (!strcmp(name, "CmdDispatch"))
        return (PFN_vkVoidFunction)CmdDispatch;
    if (!strcmp(name, "CmdDispatchIndirect"))
        return (PFN_vkVoidFunction)CmdDispatchIndirect;
    if (!strcmp(name, "CmdCopyBuffer"))
        return (PFN_vkVoidFunction)CmdCopyBuffer;
    if (!strcmp(name, "CmdCopyImage"))
        return (PFN_vkVoidFunction)CmdCopyImage;
    if (!strcmp(name, "CmdBlitImage"))
        return (PFN_vkVoidFunction)CmdBlitImage;
    if (!strcmp(name, "CmdCopyBufferToImage"))
        return (PFN_vkVoidFunction)CmdCopyBufferToImage;
    if (!strcmp(name, "CmdCopyImageToBuffer"))
        return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
    if (!strcmp(name, "CmdUpdateBuffer"))
        return (PFN_vkVoidFunction)CmdUpdateBuffer;
    if (!strcmp(name, "CmdFillBuffer"))
        return (PFN_vkVoidFunction)CmdFillBuffer;
    if (!strcmp(name, "CmdClearColorImage"))
        return (PFN_vkVoidFunction)CmdClearColorImage;
    if (!strcmp(name, "CmdClearDepthStencilImage"))
        return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
    if (!strcmp(name, "CmdClearAttachments"))
        return (PFN_vkVoidFunction)CmdClearAttachments;
    if (!strcmp(name, "CmdResolveImage"))
        return (PFN_vkVoidFunction)CmdResolveImage;
    if (!strcmp(name, "CmdSetEvent"))
        return (PFN_vkVoidFunction)CmdSetEvent;
    if (!strcmp(name, "CmdResetEvent"))
        return (PFN_vkVoidFunction)CmdResetEvent;
    if (!strcmp(name, "CmdWaitEvents"))
        return (PFN_vkVoidFunction)CmdWaitEvents;
    if (!strcmp(name, "CmdPipelineBarrier"))
        return (PFN_vkVoidFunction)CmdPipelineBarrier;
    if (!strcmp(name, "CmdBeginQuery"))
        return (PFN_vkVoidFunction)CmdBeginQuery;
    if (!strcmp(name, "CmdEndQuery"))
        return (PFN_vkVoidFunction)CmdEndQuery;
    if (!strcmp(name, "CmdResetQueryPool"))
        return (PFN_vkVoidFunction)CmdResetQueryPool;
    if (!strcmp(name, "CmdWriteTimestamp"))
        return (PFN_vkVoidFunction)CmdWriteTimestamp;
    if (!strcmp(name, "CmdCopyQueryPoolResults"))
        return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
    if (!strcmp(name, "CmdPushConstants"))
        return (PFN_vkVoidFunction)CmdPushConstants;
    if (!strcmp(name, "CmdBeginRenderPass"))
        return (PFN_vkVoidFunction)CmdBeginRenderPass;
    if (!strcmp(name, "CmdNextSubpass"))
        return (PFN_vkVoidFunction)CmdNextSubpass;
    if (!strcmp(name, "CmdEndRenderPass"))
        return (PFN_vkVoidFunction)CmdEndRenderPass;
    if (!strcmp(name, "CmdExecuteCommands"))
        return (PFN_vkVoidFunction)CmdExecuteCommands;

    return NULL;
}
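
// Map a core instance-level command name to this layer's implementation.
// Returns NULL for commands this layer does not intercept.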
static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "CreateInstance"))
        return (PFN_vkVoidFunction)CreateInstance;
    if (!strcmp(name, "DestroyInstance"))
        return (PFN_vkVoidFunction)DestroyInstance;
    if (!strcmp(name, "EnumeratePhysicalDevices"))
        return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
    if (!strcmp(name, "GetPhysicalDeviceFeatures"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
    if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
    if (!strcmp(name, "GetInstanceProcAddr"))
        return (PFN_vkVoidFunction)GetInstanceProcAddr;
    if (!strcmp(name, "CreateDevice"))
        return (PFN_vkVoidFunction)CreateDevice;
    if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
    if (!strcmp(name, "EnumerateInstanceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
    if (!strcmp(name, "EnumerateDeviceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;

    return NULL;
}

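// Intercept device-level WSI (swapchain) commands, but only when the WSI extension was
// enabled on the device; when called with a null device the extension check is skipped.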
static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
    if (device) {
        layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
        if (!device_data->wsi_enabled)
            return nullptr;
    }
    if (!strcmp("vkCreateSwapchainKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
    if (!strcmp("vkDestroySwapchainKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
    if (!strcmp("vkGetSwapchainImagesKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
    if (!strcmp("vkAcquireNextImageKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
    if (!strcmp("vkQueuePresentKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);

    return nullptr;
}

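// Return this layer's handler for a device-level command, or forward the query down the
// dispatch chain if the command is not intercepted here.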
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreDeviceCommand(funcName);
    if (addr) {
        return addr;
    }
    assert(device);

    addr = InterceptWsiEnabledCommand(funcName, device);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
}

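// Resolve instance-level queries: check core instance, core device, and WSI commands first,
// then debug-report commands, before forwarding the query down the dispatch chain.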
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreInstanceCommand(funcName);
    if (!addr) {
        addr = InterceptCoreDeviceCommand(funcName);
    }
    if (!addr) {
        addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
    }
    if (addr) {
        return addr;
    }
    assert(instance);

    addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    addr = InterceptWsiEnabledCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
}

} // namespace object_tracker

// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

// Loader-layer interface v0: thin wrappers only, since this library contains just a single layer
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return object_tracker::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return object_tracker::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}
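
// Note (illustrative sketch only, not part of the layer's behavior): under loader-layer
// interface v0 the Vulkan loader is expected to resolve a layer roughly as follows -- load
// the layer library, look up the exported vkEnumerate*Properties and vkGet*ProcAddr symbols
// above, then call vkGetInstanceProcAddr/vkGetDeviceProcAddr to fetch the layer's intercepted
// commands while building the instance and device dispatch chains. The authoritative sequence
// is defined by the loader's layer-interface documentation, not by this comment.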