blob: a358cc4ea8dd0f7cf3e044602e2c068c89dcf205 [file] [log] [blame]
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001/*
2 * Copyright (c) 2015-2016 The Khronos Group Inc.
3 * Copyright (c) 2015-2016 Valve Corporation
4 * Copyright (c) 2015-2016 LunarG, Inc.
5 * Copyright (c) 2015-2016 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Tobin Ehlis <tobine@google.com>
21 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
22 * Author: Jon Ashburn <jon@lunarg.com>
23 * Author: Mike Stroyan <stroyan@google.com>
24 * Author: Tony Barbour <tony@LunarG.com>
25 */
26
27#include "vk_loader_platform.h"
28#include "vulkan/vulkan.h"
29
30#include <cinttypes>
31#include <stdio.h>
32#include <stdlib.h>
33#include <string.h>
34
35#include <unordered_map>
36
37#include "vk_layer_config.h"
38#include "vk_layer_data.h"
39#include "vk_layer_logging.h"
40#include "vk_layer_table.h"
41#include "vulkan/vk_layer.h"
42
43#include "object_tracker.h"
44
45namespace object_tracker {
46
47static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
48
49 layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
50}
51
52// Add new queue to head of global queue list
53static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
54 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
55 auto queueItem = device_data->queue_info_map.find(queue);
56 if (queueItem == device_data->queue_info_map.end()) {
57 OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
58 if (p_queue_info != NULL) {
59 memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
60 p_queue_info->queue = queue;
61 p_queue_info->queue_node_index = queue_node_index;
62 device_data->queue_info_map[queue] = p_queue_info;
63 } else {
64 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
65 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
66 "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
67 }
68 }
69}
70
71// Destroy memRef lists and free all memory
72static void DestroyQueueDataStructures(VkDevice device) {
73 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
74
75 for (auto queue_item : device_data->queue_info_map) {
76 delete queue_item.second;
77 }
78 device_data->queue_info_map.clear();
79
80 // Destroy the items in the queue map
81 auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
82 while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
83 uint32_t obj_index = queue->second->object_type;
84 assert(device_data->num_total_objects > 0);
85 device_data->num_total_objects--;
86 assert(device_data->num_objects[obj_index] > 0);
87 device_data->num_objects[obj_index]--;
88 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
89 __LINE__, OBJTRACK_NONE, LayerName,
90 "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
91 queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
92 delete queue->second;
93 queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
94 }
95}
96
97// Check Queue type flags for selected queue operations
98static void ValidateQueueFlags(VkQueue queue, const char *function) {
99 layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
100 auto queue_item = device_data->queue_info_map.find(queue);
101 if (queue_item != device_data->queue_info_map.end()) {
102 OT_QUEUE_INFO *pQueueInfo = queue_item->second;
103 if (pQueueInfo != NULL) {
104 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
105 if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
106 0) {
107 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
108 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
109 "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
110 }
111 }
112 }
113}
114
115static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
116 VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
117 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
118
119 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<const uint64_t>(command_buffer),
120 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
121 string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));
122
123 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
124 pNewObjNode->object_type = object_type;
125 pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
126 pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
127 if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
128 pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
129 } else {
130 pNewObjNode->status = OBJSTATUS_NONE;
131 }
132 device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
133 device_data->num_objects[object_type]++;
134 device_data->num_total_objects++;
135}
136
137static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
138 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
139 bool skip_call = false;
140 uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
141 if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
142 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
143 OBJTRACK_NODE *pNode =
144 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
145
146 if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
147 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
148 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
149 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
150 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
151 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
152 reinterpret_cast<uint64_t &>(command_pool));
153 }
154 } else {
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600155 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
156 __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove command buffer obj 0x%" PRIxLEAST64
157 ". Was it created? Has it already been destroyed?",
158 object_handle);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600159 }
160 return skip_call;
161}
162
163static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
164 VkDebugReportObjectTypeEXT object_type) {
165 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
166
167 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
168 reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
169 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
170 reinterpret_cast<uint64_t &>(descriptor_set));
171
172 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
173 pNewObjNode->object_type = object_type;
174 pNewObjNode->status = OBJSTATUS_NONE;
175 pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
176 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
177 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
178 pNewObjNode;
179 device_data->num_objects[object_type]++;
180 device_data->num_total_objects++;
181}
182
183static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
184 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
185 bool skip_call = false;
186 uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
187 auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
188 if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
189 OBJTRACK_NODE *pNode = dsItem->second;
190
191 if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
192 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
193 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
194 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
195 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
196 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
197 reinterpret_cast<uint64_t &>(descriptor_pool));
198 }
199 } else {
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600200 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
201 __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove descriptor set obj 0x%" PRIxLEAST64
202 ". Was it created? Has it already been destroyed?",
203 object_handle);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600204 }
205 return skip_call;
206}
207
208static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
209 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
210
211 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
212 OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
213 object_name[object_type], reinterpret_cast<uint64_t>(vkObj));
214
215 OBJTRACK_NODE *p_obj_node = NULL;
216 auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
217 if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
218 p_obj_node = new OBJTRACK_NODE;
219 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
220 device_data->num_objects[object_type]++;
221 device_data->num_total_objects++;
222 } else {
223 p_obj_node = queue_item->second;
224 }
225 p_obj_node->object_type = object_type;
226 p_obj_node->status = OBJSTATUS_NONE;
227 p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
228}
229
230static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
231 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
232 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
233 reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
234 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
235 reinterpret_cast<uint64_t &>(swapchain_image));
236
237 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
238 pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
239 pNewObjNode->status = OBJSTATUS_NONE;
240 pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
241 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
242 device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
243}
244
245template <typename T1, typename T2>
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300246static void CreateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600247 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
248
249 log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
250 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
251 object_name[object_type], reinterpret_cast<uint64_t>(object));
252
253 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
254 pNewObjNode->object_type = object_type;
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300255 pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600256 pNewObjNode->handle = reinterpret_cast<uint64_t>(object);
257 instance_data->object_map[object_type][reinterpret_cast<uint64_t>(object)] = pNewObjNode;
258 instance_data->num_objects[object_type]++;
259 instance_data->num_total_objects++;
260}
261
262template <typename T1, typename T2>
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300263static void CreateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600264 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
265
266 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t &>(object),
267 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
268 object_name[object_type], reinterpret_cast<uint64_t &>(object));
269
270 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
271 pNewObjNode->object_type = object_type;
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300272 pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600273 pNewObjNode->handle = reinterpret_cast<uint64_t &>(object);
274 device_data->object_map[object_type][reinterpret_cast<uint64_t &>(object)] = pNewObjNode;
275 device_data->num_objects[object_type]++;
276 device_data->num_total_objects++;
277}
278
279template <typename T1, typename T2>
Chris Forbes3e51a202016-09-29 14:35:09 +1300280static void DestroyDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600281 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
282
283 uint64_t object_handle = reinterpret_cast<uint64_t>(object);
284
Chris Forbes3e51a202016-09-29 14:35:09 +1300285 // TODO: This function produces errors, but doesn't compose well with either
286 // a call down the chain or its own state updates
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600287 auto item = instance_data->object_map[object_type].find(object_handle);
288 if (item != instance_data->object_map[object_type].end()) {
289
290 OBJTRACK_NODE *pNode = item->second;
291 assert(instance_data->num_total_objects > 0);
292 instance_data->num_total_objects--;
293 assert(instance_data->num_objects[object_type] > 0);
294 instance_data->num_objects[pNode->object_type]--;
295
296 log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
297 OBJTRACK_NONE, LayerName,
298 "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
299 object_name[pNode->object_type], reinterpret_cast<uint64_t>(object), instance_data->num_total_objects,
300 instance_data->num_objects[pNode->object_type], object_name[pNode->object_type]);
301
Chris Forbes3e51a202016-09-29 14:35:09 +1300302 auto allocated_with_custom = pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR;
303 if (custom_allocator ^ allocated_with_custom) {
304 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
305 OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
306 "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
307 (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
308 (allocated_with_custom ? "" : "not "));
309 }
310
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600311 delete pNode;
312 instance_data->object_map[object_type].erase(item);
313 } else {
314 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
315 OBJTRACK_UNKNOWN_OBJECT, LayerName,
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600316 "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
317 object_name[object_type], object_handle);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600318 }
319}
320
321template <typename T1, typename T2>
Chris Forbes3e51a202016-09-29 14:35:09 +1300322static void DestroyNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600323 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
324
325 uint64_t object_handle = reinterpret_cast<uint64_t &>(object);
326
327 auto item = device_data->object_map[object_type].find(object_handle);
328 if (item != device_data->object_map[object_type].end()) {
329
330 OBJTRACK_NODE *pNode = item->second;
331 assert(device_data->num_total_objects > 0);
332 device_data->num_total_objects--;
333 assert(device_data->num_objects[pNode->object_type] > 0);
334 device_data->num_objects[pNode->object_type]--;
335
336 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
337 OBJTRACK_NONE, LayerName,
338 "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
339 object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
340 device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);
341
Chris Forbes3e51a202016-09-29 14:35:09 +1300342 auto allocated_with_custom = pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR;
343 if (custom_allocator ^ allocated_with_custom) {
344 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
345 OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
346 "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
347 (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
348 (allocated_with_custom ? "" : "not "));
349 }
350
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600351 delete pNode;
352 device_data->object_map[object_type].erase(item);
353 } else {
354 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
355 OBJTRACK_UNKNOWN_OBJECT, LayerName,
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600356 "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
357 object_name[object_type], object_handle);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600358 }
359}
360
361template <typename T1, typename T2>
362static bool ValidateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
363 bool null_allowed) {
364 if (null_allowed && (object == VK_NULL_HANDLE)) {
365 return false;
366 }
367 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
368
369 if (instance_data->object_map[object_type].find(reinterpret_cast<uint64_t>(object)) ==
370 instance_data->object_map[object_type].end()) {
371 return log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600372 __LINE__, OBJTRACK_INVALID_OBJECT, LayerName, "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600373 reinterpret_cast<uint64_t>(object));
374 }
375 return false;
376}
377
378template <typename T1, typename T2>
379static bool ValidateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
380 bool null_allowed) {
381 if (null_allowed && (object == VK_NULL_HANDLE)) {
382 return false;
383 }
384 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
385 if (device_data->object_map[object_type].find(reinterpret_cast<uint64_t &>(object)) ==
386 device_data->object_map[object_type].end()) {
387 // If object is an image, also look for it in the swapchain image map
388 if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
389 (device_data->swapchainImageMap.find(reinterpret_cast<uint64_t &>(object)) == device_data->swapchainImageMap.end())) {
390 return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
391 reinterpret_cast<uint64_t &>(object), __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
Mark Lobodzinski45e68612016-07-18 17:06:52 -0600392 "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type], reinterpret_cast<uint64_t &>(object));
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600393 }
394 }
395 return false;
396}
397
398static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
399 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
400 for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
401 OBJTRACK_NODE *object_info = item->second;
402 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
403 OBJTRACK_OBJECT_LEAK, LayerName,
404 "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
405 reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
406 item = device_data->object_map[object_type].erase(item);
407 }
408}
409
410VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
411 std::unique_lock<std::mutex> lock(global_lock);
412
413 dispatch_key key = get_dispatch_key(instance);
414 layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
415
416 // Enable the temporary callback(s) here to catch cleanup issues:
417 bool callback_setup = false;
418 if (instance_data->num_tmp_callbacks > 0) {
419 if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
420 instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
421 callback_setup = true;
422 }
423 }
424
425 ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
426
Chris Forbes3e51a202016-09-29 14:35:09 +1300427 DestroyDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600428 // Report any remaining objects in LL
429
430 for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
431 iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
432 OBJTRACK_NODE *pNode = iit->second;
433
434 VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
435
436 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
437 OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
438 string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
439 // Semaphore:
440 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
441 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
442 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
443 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
444 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
445 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
446 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
447 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
448 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
449 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
450 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
451 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
452 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
453 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
454 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
455 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
456 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
457 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
458 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
459 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
460 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
461 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
462 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
463 }
464 instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();
465
466 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
467 pInstanceTable->DestroyInstance(instance, pAllocator);
468
469 // Disable and cleanup the temporary callback(s):
470 if (callback_setup) {
471 layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
472 }
473 if (instance_data->num_tmp_callbacks > 0) {
474 layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
475 instance_data->num_tmp_callbacks = 0;
476 }
477
478 // Clean up logging callback, if any
479 while (instance_data->logging_callback.size() > 0) {
480 VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
481 layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
482 instance_data->logging_callback.pop_back();
483 }
484
485 layer_debug_report_destroy_instance(instance_data->report_data);
486 layer_data_map.erase(key);
487
488 instanceExtMap.erase(pInstanceTable);
489 lock.unlock();
490 ot_instance_table_map.erase(key);
491}
492
493VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
494
495 std::unique_lock<std::mutex> lock(global_lock);
496 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Chris Forbes3e51a202016-09-29 14:35:09 +1300497 DestroyDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600498
499 // Report any remaining objects associated with this VkDevice object in LL
500 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
501 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
502 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
503 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
504 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
505 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
506 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
507 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
508 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
509 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
510 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
511 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
512 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
513 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
514 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
515 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
516 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
517 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
518 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
519 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
520 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
521 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
522 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
523
524 // Clean up Queue's MemRef Linked Lists
525 DestroyQueueDataStructures(device);
526
527 lock.unlock();
528
529 dispatch_key key = get_dispatch_key(device);
530 VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
531 pDisp->DestroyDevice(device, pAllocator);
532 ot_device_table_map.erase(key);
533}
534
535VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
536 bool skip_call = false;
537 {
538 std::lock_guard<std::mutex> lock(global_lock);
539 skip_call |=
540 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
541 }
542 if (skip_call) {
543 return;
544 }
545 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
546}
547
548VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
549 VkFormatProperties *pFormatProperties) {
550 bool skip_call = false;
551 {
552 std::lock_guard<std::mutex> lock(global_lock);
553 skip_call |=
554 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
555 }
556 if (skip_call) {
557 return;
558 }
559 get_dispatch_table(ot_instance_table_map, physicalDevice)
560 ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
561}
562
563VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
564 VkImageType type, VkImageTiling tiling,
565 VkImageUsageFlags usage, VkImageCreateFlags flags,
566 VkImageFormatProperties *pImageFormatProperties) {
567 bool skip_call = false;
568 {
569 std::lock_guard<std::mutex> lock(global_lock);
570 skip_call |=
571 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
572 }
573 if (skip_call) {
574 return VK_ERROR_VALIDATION_FAILED_EXT;
575 }
576 VkResult result =
577 get_dispatch_table(ot_instance_table_map, physicalDevice)
578 ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
579 return result;
580}
581
582VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
583 bool skip_call = false;
584 {
585 std::lock_guard<std::mutex> lock(global_lock);
586 skip_call |=
587 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
588 }
589 if (skip_call) {
590 return;
591 }
592 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
593}
594
595VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
596 VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
597 bool skip_call = false;
598 {
599 std::lock_guard<std::mutex> lock(global_lock);
600 skip_call |=
601 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
602 }
603 if (skip_call) {
604 return;
605 }
606 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
607}
608
609VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);
610
611VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);
612
613VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
614 VkExtensionProperties *pProperties);
615
616VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);
617
618VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
619 VkLayerProperties *pProperties);
620
621VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
622 bool skip_call = false;
623 {
624 std::lock_guard<std::mutex> lock(global_lock);
625 skip_call |= ValidateNonDispatchableObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
626 if (pSubmits) {
627 for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
628 if (pSubmits[idx0].pCommandBuffers) {
629 for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
630 skip_call |= ValidateDispatchableObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
631 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
632 }
633 }
634 if (pSubmits[idx0].pSignalSemaphores) {
635 for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
636 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
637 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
638 }
639 }
640 if (pSubmits[idx0].pWaitSemaphores) {
641 for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
642 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
643 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
644 }
645 }
646 }
647 }
648 if (queue) {
649 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
650 }
651 }
652 if (skip_call) {
653 return VK_ERROR_VALIDATION_FAILED_EXT;
654 }
655 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
656 return result;
657}
658
659VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
660 bool skip_call = false;
661 {
662 std::lock_guard<std::mutex> lock(global_lock);
663 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
664 }
665 if (skip_call) {
666 return VK_ERROR_VALIDATION_FAILED_EXT;
667 }
668 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
669 return result;
670}
671
672VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
673 bool skip_call = false;
674 {
675 std::lock_guard<std::mutex> lock(global_lock);
676 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
677 }
678 if (skip_call) {
679 return VK_ERROR_VALIDATION_FAILED_EXT;
680 }
681 VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
682 return result;
683}
684
685VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
686 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
687 bool skip_call = false;
688 {
689 std::lock_guard<std::mutex> lock(global_lock);
690 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
691 }
692 if (skip_call) {
693 return VK_ERROR_VALIDATION_FAILED_EXT;
694 }
695 VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
696 {
697 std::lock_guard<std::mutex> lock(global_lock);
698 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300699 CreateNonDispatchableObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600700 }
701 }
702 return result;
703}
704
705VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
706 const VkMappedMemoryRange *pMemoryRanges) {
707 bool skip_call = false;
708 {
709 std::lock_guard<std::mutex> lock(global_lock);
710 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
711 if (pMemoryRanges) {
712 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
713 if (pMemoryRanges[idx0].memory) {
714 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
715 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
716 }
717 }
718 }
719 }
720 if (skip_call) {
721 return VK_ERROR_VALIDATION_FAILED_EXT;
722 }
723 VkResult result =
724 get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
725 return result;
726}
727
728VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
729 const VkMappedMemoryRange *pMemoryRanges) {
730 bool skip_call = false;
731 {
732 std::lock_guard<std::mutex> lock(global_lock);
733 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
734 if (pMemoryRanges) {
735 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
736 if (pMemoryRanges[idx0].memory) {
737 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
738 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
739 }
740 }
741 }
742 }
743 if (skip_call) {
744 return VK_ERROR_VALIDATION_FAILED_EXT;
745 }
746 VkResult result =
747 get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
748 return result;
749}
750
751VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
752 VkDeviceSize *pCommittedMemoryInBytes) {
753 bool skip_call = false;
754 {
755 std::lock_guard<std::mutex> lock(global_lock);
756 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
757 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
758 }
759 if (skip_call) {
760 return;
761 }
762 get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
763}
764
765VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
766 VkDeviceSize memoryOffset) {
767 bool skip_call = false;
768 {
769 std::lock_guard<std::mutex> lock(global_lock);
770 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
771 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
772 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
773 }
774 if (skip_call) {
775 return VK_ERROR_VALIDATION_FAILED_EXT;
776 }
777 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
778 return result;
779}
780
781VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
782 bool skip_call = false;
783 {
784 std::lock_guard<std::mutex> lock(global_lock);
785 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
786 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
787 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
788 }
789 if (skip_call) {
790 return VK_ERROR_VALIDATION_FAILED_EXT;
791 }
792 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
793 return result;
794}
795
796VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
797 VkMemoryRequirements *pMemoryRequirements) {
798 bool skip_call = false;
799 {
800 std::lock_guard<std::mutex> lock(global_lock);
801 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
802 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
803 }
804 if (skip_call) {
805 return;
806 }
807 get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
808}
809
810VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
811 bool skip_call = false;
812 {
813 std::lock_guard<std::mutex> lock(global_lock);
814 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
815 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
816 }
817 if (skip_call) {
818 return;
819 }
820 get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
821}
822
823VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
824 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
825 bool skip_call = false;
826 {
827 std::lock_guard<std::mutex> lock(global_lock);
828 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
829 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
830 }
831 if (skip_call) {
832 return;
833 }
834 get_dispatch_table(ot_device_table_map, device)
835 ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
836}
837
838VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
839 VkImageType type, VkSampleCountFlagBits samples,
840 VkImageUsageFlags usage, VkImageTiling tiling,
841 uint32_t *pPropertyCount,
842 VkSparseImageFormatProperties *pProperties) {
843 bool skip_call = false;
844 {
845 std::lock_guard<std::mutex> lock(global_lock);
846 skip_call |=
847 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
848 }
849 if (skip_call) {
850 return;
851 }
852 get_dispatch_table(ot_instance_table_map, physicalDevice)
853 ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
854 pProperties);
855}
856
857VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
858 const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
859 bool skip_call = false;
860 {
861 std::lock_guard<std::mutex> lock(global_lock);
862 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
863 }
864 if (skip_call) {
865 return VK_ERROR_VALIDATION_FAILED_EXT;
866 }
867 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
868 {
869 std::lock_guard<std::mutex> lock(global_lock);
870 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300871 CreateNonDispatchableObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600872 }
873 }
874 return result;
875}
876
877VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
878 bool skip_call = false;
879 {
880 std::lock_guard<std::mutex> lock(global_lock);
881 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
882 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
883 }
884 if (skip_call) {
885 return;
886 }
887 {
888 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +1300889 DestroyNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600890 }
891 get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
892}
893
894VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
895 bool skip_call = false;
896 {
897 std::lock_guard<std::mutex> lock(global_lock);
898 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
899 if (pFences) {
900 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
901 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
902 }
903 }
904 }
905 if (skip_call) {
906 return VK_ERROR_VALIDATION_FAILED_EXT;
907 }
908 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
909 return result;
910}
911
912VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
913 bool skip_call = false;
914 {
915 std::lock_guard<std::mutex> lock(global_lock);
916 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
917 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
918 }
919 if (skip_call) {
920 return VK_ERROR_VALIDATION_FAILED_EXT;
921 }
922 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
923 return result;
924}
925
926VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
927 uint64_t timeout) {
928 bool skip_call = false;
929 {
930 std::lock_guard<std::mutex> lock(global_lock);
931 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
932 if (pFences) {
933 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
934 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
935 }
936 }
937 }
938 if (skip_call) {
939 return VK_ERROR_VALIDATION_FAILED_EXT;
940 }
941 VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
942 return result;
943}
944
945VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
946 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
947 bool skip_call = false;
948 {
949 std::lock_guard<std::mutex> lock(global_lock);
950 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
951 }
952 if (skip_call) {
953 return VK_ERROR_VALIDATION_FAILED_EXT;
954 }
955 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
956 {
957 std::lock_guard<std::mutex> lock(global_lock);
958 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300959 CreateNonDispatchableObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600960 }
961 }
962 return result;
963}
964
965VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
966 bool skip_call = false;
967 {
968 std::lock_guard<std::mutex> lock(global_lock);
969 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
970 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
971 }
972 if (skip_call) {
973 return;
974 }
975 {
976 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +1300977 DestroyNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600978 }
979 get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
980}
981
982VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
983 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
984 bool skip_call = false;
985 {
986 std::lock_guard<std::mutex> lock(global_lock);
987 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
988 }
989 if (skip_call) {
990 return VK_ERROR_VALIDATION_FAILED_EXT;
991 }
992 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
993 {
994 std::lock_guard<std::mutex> lock(global_lock);
995 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +1300996 CreateNonDispatchableObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -0600997 }
998 }
999 return result;
1000}
1001
1002VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
1003 bool skip_call = false;
1004 {
1005 std::lock_guard<std::mutex> lock(global_lock);
1006 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1007 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1008 }
1009 if (skip_call) {
1010 return;
1011 }
1012 {
1013 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001014 DestroyNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001015 }
1016 get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
1017}
1018
1019VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
1020 bool skip_call = false;
1021 {
1022 std::lock_guard<std::mutex> lock(global_lock);
1023 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1024 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1025 }
1026 if (skip_call) {
1027 return VK_ERROR_VALIDATION_FAILED_EXT;
1028 }
1029 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
1030 return result;
1031}
1032
1033VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
1034 bool skip_call = false;
1035 {
1036 std::lock_guard<std::mutex> lock(global_lock);
1037 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1038 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1039 }
1040 if (skip_call) {
1041 return VK_ERROR_VALIDATION_FAILED_EXT;
1042 }
1043 VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
1044 return result;
1045}
1046
1047VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
1048 bool skip_call = false;
1049 {
1050 std::lock_guard<std::mutex> lock(global_lock);
1051 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1052 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
1053 }
1054 if (skip_call) {
1055 return VK_ERROR_VALIDATION_FAILED_EXT;
1056 }
1057 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
1058 return result;
1059}
1060
1061VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1062 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
1063 bool skip_call = false;
1064 {
1065 std::lock_guard<std::mutex> lock(global_lock);
1066 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1067 }
1068 if (skip_call) {
1069 return VK_ERROR_VALIDATION_FAILED_EXT;
1070 }
1071 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
1072 {
1073 std::lock_guard<std::mutex> lock(global_lock);
1074 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001075 CreateNonDispatchableObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001076 }
1077 }
1078 return result;
1079}
1080
1081VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
1082 bool skip_call = false;
1083 {
1084 std::lock_guard<std::mutex> lock(global_lock);
1085 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1086 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1087 }
1088 if (skip_call) {
1089 return;
1090 }
1091 {
1092 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001093 DestroyNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001094 }
1095 get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
1096}
1097
1098VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
1099 size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1100 bool skip_call = false;
1101 {
1102 std::lock_guard<std::mutex> lock(global_lock);
1103 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1104 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1105 }
1106 if (skip_call) {
1107 return VK_ERROR_VALIDATION_FAILED_EXT;
1108 }
1109 VkResult result = get_dispatch_table(ot_device_table_map, device)
1110 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
1111 return result;
1112}
1113
1114VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
1115 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
1116 bool skip_call = false;
1117 {
1118 std::lock_guard<std::mutex> lock(global_lock);
1119 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1120 }
1121 if (skip_call) {
1122 return VK_ERROR_VALIDATION_FAILED_EXT;
1123 }
1124 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1125 {
1126 std::lock_guard<std::mutex> lock(global_lock);
1127 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001128 CreateNonDispatchableObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001129 }
1130 }
1131 return result;
1132}
1133
1134VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
1135 bool skip_call = false;
1136 {
1137 std::lock_guard<std::mutex> lock(global_lock);
1138 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1139 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1140 }
1141 if (skip_call) {
1142 return;
1143 }
1144 {
1145 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001146 DestroyNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001147 }
1148 get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
1149}
1150
1151VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
1152 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
1153 bool skip_call = false;
1154 {
1155 std::lock_guard<std::mutex> lock(global_lock);
1156 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1157 if (pCreateInfo) {
1158 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1159 }
1160 }
1161 if (skip_call) {
1162 return VK_ERROR_VALIDATION_FAILED_EXT;
1163 }
1164 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
1165 {
1166 std::lock_guard<std::mutex> lock(global_lock);
1167 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001168 CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001169 }
1170 }
1171 return result;
1172}
1173
1174VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
1175 bool skip_call = false;
1176 {
1177 std::lock_guard<std::mutex> lock(global_lock);
1178 skip_call |= ValidateNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
1179 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1180 }
1181 if (skip_call) {
1182 return;
1183 }
1184 {
1185 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001186 DestroyNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001187 }
1188 get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
1189}
1190
1191VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1192 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1193 bool skip_call = false;
1194 {
1195 std::lock_guard<std::mutex> lock(global_lock);
1196 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1197 }
1198 if (skip_call) {
1199 return VK_ERROR_VALIDATION_FAILED_EXT;
1200 }
1201 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
1202 {
1203 std::lock_guard<std::mutex> lock(global_lock);
1204 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001205 CreateNonDispatchableObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001206 }
1207 }
1208 return result;
1209}
1210
1211VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1212 bool skip_call = false;
1213 {
1214 std::lock_guard<std::mutex> lock(global_lock);
1215 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1216 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1217 }
1218 if (skip_call) {
1219 return;
1220 }
1221 {
1222 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001223 DestroyNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001224 }
1225 get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
1226}
1227
1228VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
1229 VkSubresourceLayout *pLayout) {
1230 bool skip_call = false;
1231 {
1232 std::lock_guard<std::mutex> lock(global_lock);
1233 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1234 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1235 }
1236 if (skip_call) {
1237 return;
1238 }
1239 get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1240}
1241
1242VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1243 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1244 bool skip_call = false;
1245 {
1246 std::lock_guard<std::mutex> lock(global_lock);
1247 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1248 if (pCreateInfo) {
1249 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1250 }
1251 }
1252 if (skip_call) {
1253 return VK_ERROR_VALIDATION_FAILED_EXT;
1254 }
1255 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
1256 {
1257 std::lock_guard<std::mutex> lock(global_lock);
1258 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001259 CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001260 }
1261 }
1262 return result;
1263}
1264
1265VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
1266 bool skip_call = false;
1267 {
1268 std::lock_guard<std::mutex> lock(global_lock);
1269 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1270 skip_call |= ValidateNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1271 }
1272 if (skip_call) {
1273 return;
1274 }
1275 {
1276 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001277 DestroyNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001278 }
1279 get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
1280}
1281
1282VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1283 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1284 bool skip_call = false;
1285 {
1286 std::lock_guard<std::mutex> lock(global_lock);
1287 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1288 }
1289 if (skip_call) {
1290 return VK_ERROR_VALIDATION_FAILED_EXT;
1291 }
1292 VkResult result =
1293 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1294 {
1295 std::lock_guard<std::mutex> lock(global_lock);
1296 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001297 CreateNonDispatchableObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001298 }
1299 }
1300 return result;
1301}
1302
1303VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1304 const VkAllocationCallbacks *pAllocator) {
1305 bool skip_call = false;
1306 {
1307 std::lock_guard<std::mutex> lock(global_lock);
1308 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1309 skip_call |= ValidateNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
1310 }
1311 if (skip_call) {
1312 return;
1313 }
1314 {
1315 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001316 DestroyNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001317 }
1318 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1319}
1320
1321VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1322 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1323 bool skip_call = false;
1324 {
1325 std::lock_guard<std::mutex> lock(global_lock);
1326 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1327 }
1328 if (skip_call) {
1329 return VK_ERROR_VALIDATION_FAILED_EXT;
1330 }
1331 VkResult result =
1332 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1333 {
1334 std::lock_guard<std::mutex> lock(global_lock);
1335 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001336 CreateNonDispatchableObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001337 }
1338 }
1339 return result;
1340}
1341
1342VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1343 const VkAllocationCallbacks *pAllocator) {
1344 bool skip_call = false;
1345 {
1346 std::lock_guard<std::mutex> lock(global_lock);
1347 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1348 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1349 }
1350 if (skip_call) {
1351 return;
1352 }
1353 {
1354 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001355 DestroyNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001356 }
1357 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1358}
1359
1360VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1361 void *pData) {
1362 bool skip_call = false;
1363 {
1364 std::lock_guard<std::mutex> lock(global_lock);
1365 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1366 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1367 }
1368 if (skip_call) {
1369 return VK_ERROR_VALIDATION_FAILED_EXT;
1370 }
1371 VkResult result =
1372 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1373 return result;
1374}
1375
1376VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1377 const VkPipelineCache *pSrcCaches) {
1378 bool skip_call = false;
1379 {
1380 std::lock_guard<std::mutex> lock(global_lock);
1381 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1382 skip_call |= ValidateNonDispatchableObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1383 if (pSrcCaches) {
1384 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1385 skip_call |=
1386 ValidateNonDispatchableObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1387 }
1388 }
1389 }
1390 if (skip_call) {
1391 return VK_ERROR_VALIDATION_FAILED_EXT;
1392 }
1393 VkResult result =
1394 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1395 return result;
1396}
1397
1398VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1399 bool skip_call = false;
1400 {
1401 std::lock_guard<std::mutex> lock(global_lock);
1402 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1403 skip_call |= ValidateNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1404 }
1405 if (skip_call) {
1406 return;
1407 }
1408 {
1409 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001410 DestroyNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001411 }
1412 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1413}
1414
1415VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1416 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1417 bool skip_call = false;
1418 {
1419 std::lock_guard<std::mutex> lock(global_lock);
1420 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1421 if (pCreateInfo) {
1422 if (pCreateInfo->pSetLayouts) {
1423 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
1424 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pSetLayouts[idx0],
1425 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1426 }
1427 }
1428 }
1429 }
1430 if (skip_call) {
1431 return VK_ERROR_VALIDATION_FAILED_EXT;
1432 }
1433 VkResult result =
1434 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1435 {
1436 std::lock_guard<std::mutex> lock(global_lock);
1437 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001438 CreateNonDispatchableObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001439 }
1440 }
1441 return result;
1442}
1443
1444VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1445 const VkAllocationCallbacks *pAllocator) {
1446 bool skip_call = false;
1447 {
1448 std::lock_guard<std::mutex> lock(global_lock);
1449 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1450 skip_call |= ValidateNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1451 }
1452 if (skip_call) {
1453 return;
1454 }
1455 {
1456 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001457 DestroyNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001458 }
1459 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1460}
1461
1462VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1463 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1464 bool skip_call = false;
1465 {
1466 std::lock_guard<std::mutex> lock(global_lock);
1467 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1468 }
1469 if (skip_call) {
1470 return VK_ERROR_VALIDATION_FAILED_EXT;
1471 }
1472 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1473 {
1474 std::lock_guard<std::mutex> lock(global_lock);
1475 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001476 CreateNonDispatchableObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001477 }
1478 }
1479 return result;
1480}
1481
1482VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1483 bool skip_call = false;
1484 {
1485 std::lock_guard<std::mutex> lock(global_lock);
1486 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1487 skip_call |= ValidateNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1488 }
1489 if (skip_call) {
1490 return;
1491 }
1492 {
1493 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001494 DestroyNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001495 }
1496 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1497}
1498
1499VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1500 const VkAllocationCallbacks *pAllocator,
1501 VkDescriptorSetLayout *pSetLayout) {
1502 bool skip_call = false;
1503 {
1504 std::lock_guard<std::mutex> lock(global_lock);
1505 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1506 if (pCreateInfo) {
1507 if (pCreateInfo->pBindings) {
1508 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1509 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1510 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1511 skip_call |=
1512 ValidateNonDispatchableObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
1513 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1514 }
1515 }
1516 }
1517 }
1518 }
1519 }
1520 if (skip_call) {
1521 return VK_ERROR_VALIDATION_FAILED_EXT;
1522 }
1523 VkResult result =
1524 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1525 {
1526 std::lock_guard<std::mutex> lock(global_lock);
1527 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001528 CreateNonDispatchableObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001529 }
1530 }
1531 return result;
1532}
1533
1534VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1535 const VkAllocationCallbacks *pAllocator) {
1536 bool skip_call = false;
1537 {
1538 std::lock_guard<std::mutex> lock(global_lock);
1539 skip_call |= ValidateNonDispatchableObject(device, descriptorSetLayout,
1540 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1541 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1542 }
1543 if (skip_call) {
1544 return;
1545 }
1546 {
1547 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001548 DestroyNonDispatchableObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001549 }
1550 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1551}
1552
1553VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1554 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1555 bool skip_call = false;
1556 {
1557 std::lock_guard<std::mutex> lock(global_lock);
1558 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1559 }
1560 if (skip_call) {
1561 return VK_ERROR_VALIDATION_FAILED_EXT;
1562 }
1563 VkResult result =
1564 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1565 {
1566 std::lock_guard<std::mutex> lock(global_lock);
1567 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001568 CreateNonDispatchableObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001569 }
1570 }
1571 return result;
1572}
1573
1574VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1575 VkDescriptorPoolResetFlags flags) {
1576 bool skip_call = false;
1577 {
1578 std::lock_guard<std::mutex> lock(global_lock);
1579 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1580 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1581 }
1582 if (skip_call) {
1583 return VK_ERROR_VALIDATION_FAILED_EXT;
1584 }
1585 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1586 return result;
1587}
1588
// Object Tracker hook for vkUpdateDescriptorSets.
// Validates every handle referenced by the write and copy arrays before
// forwarding the call: destination/source sets for copies, and the
// descriptor-type-dependent payload (buffers, image views/samplers, or texel
// buffer views) for writes. If any validation fails, the call is dropped.
VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
                                                const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
                                                const VkCopyDescriptorSet *pDescriptorCopies) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        // Copies: both the destination and source descriptor sets must be tracked.
        if (pDescriptorCopies) {
            for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
                if (pDescriptorCopies[idx0].dstSet) {
                    skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].dstSet,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
                }
                if (pDescriptorCopies[idx0].srcSet) {
                    skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].srcSet,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
                }
            }
        }
        // Writes: which payload array is meaningful depends on descriptorType.
        if (pDescriptorWrites) {
            for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
                if (pDescriptorWrites[idx1].dstSet) {
                    skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].dstSet,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
                }
                // Buffer descriptors use pBufferInfo.
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
                    for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
                        if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
                            skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
                                                                       VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
                        }
                    }
                }
                // Image/sampler descriptors use pImageInfo (view and/or sampler).
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
                    for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
                        if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
                            skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
                                                                       VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
                        }
                        if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
                            skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
                                                                       VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
                        }
                    }
                }
                // Texel-buffer descriptors use pTexelBufferView; the handle is
                // validated with null_allowed=true (note: reported with the
                // BUFFER_VIEW type).
                if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
                    (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
                    for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
                        skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
                    }
                }
            }
        }
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}
1657
1658VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1659 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1660 bool skip_call = false;
1661 {
1662 std::lock_guard<std::mutex> lock(global_lock);
1663 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1664 if (pCreateInfo) {
1665 if (pCreateInfo->pAttachments) {
1666 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
1667 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pAttachments[idx0],
1668 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1669 }
1670 }
1671 if (pCreateInfo->renderPass) {
1672 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->renderPass,
1673 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1674 }
1675 }
1676 }
1677 if (skip_call) {
1678 return VK_ERROR_VALIDATION_FAILED_EXT;
1679 }
1680 VkResult result =
1681 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1682 {
1683 std::lock_guard<std::mutex> lock(global_lock);
1684 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001685 CreateNonDispatchableObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001686 }
1687 }
1688 return result;
1689}
1690
1691VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1692 bool skip_call = false;
1693 {
1694 std::lock_guard<std::mutex> lock(global_lock);
1695 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1696 skip_call |= ValidateNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
1697 }
1698 if (skip_call) {
1699 return;
1700 }
1701 {
1702 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001703 DestroyNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001704 }
1705 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1706}
1707
1708VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1709 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1710 bool skip_call = false;
1711 {
1712 std::lock_guard<std::mutex> lock(global_lock);
1713 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1714 }
1715 if (skip_call) {
1716 return VK_ERROR_VALIDATION_FAILED_EXT;
1717 }
1718 VkResult result =
1719 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1720 {
1721 std::lock_guard<std::mutex> lock(global_lock);
1722 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001723 CreateNonDispatchableObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001724 }
1725 }
1726 return result;
1727}
1728
1729VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1730 bool skip_call = false;
1731 {
1732 std::lock_guard<std::mutex> lock(global_lock);
1733 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1734 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1735 }
1736 if (skip_call) {
1737 return;
1738 }
1739 {
1740 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13001741 DestroyNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001742 }
1743 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1744}
1745
1746VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1747 bool skip_call = false;
1748 {
1749 std::lock_guard<std::mutex> lock(global_lock);
1750 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1751 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1752 }
1753 if (skip_call) {
1754 return;
1755 }
1756 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1757}
1758
1759VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1760 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1761 bool skip_call = false;
1762 {
1763 std::lock_guard<std::mutex> lock(global_lock);
1764 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1765 }
1766 if (skip_call) {
1767 return VK_ERROR_VALIDATION_FAILED_EXT;
1768 }
1769 VkResult result =
1770 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1771 {
1772 std::lock_guard<std::mutex> lock(global_lock);
1773 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001774 CreateNonDispatchableObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001775 }
1776 }
1777 return result;
1778}
1779
1780VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1781 bool skip_call = false;
1782 {
1783 std::lock_guard<std::mutex> lock(global_lock);
1784 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1785 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1786 }
1787 if (skip_call) {
1788 return VK_ERROR_VALIDATION_FAILED_EXT;
1789 }
1790 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1791 return result;
1792}
1793
1794VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1795 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1796 bool skip_call = false;
1797 {
1798 std::lock_guard<std::mutex> lock(global_lock);
1799 skip_call |=
1800 ValidateDispatchableObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1801 if (begin_info) {
1802 OBJTRACK_NODE *pNode =
1803 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<const uint64_t>(command_buffer)];
1804 if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
1805 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
1806 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
1807 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
1808 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1809 }
1810 }
1811 }
1812 if (skip_call) {
1813 return VK_ERROR_VALIDATION_FAILED_EXT;
1814 }
1815 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1816 return result;
1817}
1818
1819VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1820 bool skip_call = false;
1821 {
1822 std::lock_guard<std::mutex> lock(global_lock);
1823 skip_call |=
1824 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1825 }
1826 if (skip_call) {
1827 return VK_ERROR_VALIDATION_FAILED_EXT;
1828 }
1829 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1830 return result;
1831}
1832
1833VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1834 bool skip_call = false;
1835 {
1836 std::lock_guard<std::mutex> lock(global_lock);
1837 skip_call |=
1838 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1839 }
1840 if (skip_call) {
1841 return VK_ERROR_VALIDATION_FAILED_EXT;
1842 }
1843 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1844 return result;
1845}
1846
1847VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1848 VkPipeline pipeline) {
1849 bool skip_call = false;
1850 {
1851 std::lock_guard<std::mutex> lock(global_lock);
1852 skip_call |=
1853 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1854 skip_call |= ValidateNonDispatchableObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1855 }
1856 if (skip_call) {
1857 return;
1858 }
1859 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1860}
1861
1862VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1863 const VkViewport *pViewports) {
1864 bool skip_call = false;
1865 {
1866 std::lock_guard<std::mutex> lock(global_lock);
1867 skip_call |=
1868 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1869 }
1870 if (skip_call) {
1871 return;
1872 }
1873 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1874}
1875
1876VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1877 const VkRect2D *pScissors) {
1878 bool skip_call = false;
1879 {
1880 std::lock_guard<std::mutex> lock(global_lock);
1881 skip_call |=
1882 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1883 }
1884 if (skip_call) {
1885 return;
1886 }
1887 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1888}
1889
1890VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1891 bool skip_call = false;
1892 {
1893 std::lock_guard<std::mutex> lock(global_lock);
1894 skip_call |=
1895 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1896 }
1897 if (skip_call) {
1898 return;
1899 }
1900 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1901}
1902
1903VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1904 float depthBiasSlopeFactor) {
1905 bool skip_call = false;
1906 {
1907 std::lock_guard<std::mutex> lock(global_lock);
1908 skip_call |=
1909 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1910 }
1911 if (skip_call) {
1912 return;
1913 }
1914 get_dispatch_table(ot_device_table_map, commandBuffer)
1915 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1916}
1917
1918VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1919 bool skip_call = false;
1920 {
1921 std::lock_guard<std::mutex> lock(global_lock);
1922 skip_call |=
1923 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1924 }
1925 if (skip_call) {
1926 return;
1927 }
1928 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1929}
1930
1931VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1932 bool skip_call = false;
1933 {
1934 std::lock_guard<std::mutex> lock(global_lock);
1935 skip_call |=
1936 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1937 }
1938 if (skip_call) {
1939 return;
1940 }
1941 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1942}
1943
1944VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1945 uint32_t compareMask) {
1946 bool skip_call = false;
1947 {
1948 std::lock_guard<std::mutex> lock(global_lock);
1949 skip_call |=
1950 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1951 }
1952 if (skip_call) {
1953 return;
1954 }
1955 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1956}
1957
1958VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1959 bool skip_call = false;
1960 {
1961 std::lock_guard<std::mutex> lock(global_lock);
1962 skip_call |=
1963 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1964 }
1965 if (skip_call) {
1966 return;
1967 }
1968 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1969}
1970
1971VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1972 bool skip_call = false;
1973 {
1974 std::lock_guard<std::mutex> lock(global_lock);
1975 skip_call |=
1976 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1977 }
1978 if (skip_call) {
1979 return;
1980 }
1981 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1982}
1983
1984VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1985 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1986 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1987 const uint32_t *pDynamicOffsets) {
1988 bool skip_call = false;
1989 {
1990 std::lock_guard<std::mutex> lock(global_lock);
1991 skip_call |=
1992 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1993 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1994 if (pDescriptorSets) {
1995 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
1996 skip_call |= ValidateNonDispatchableObject(commandBuffer, pDescriptorSets[idx0],
1997 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1998 }
1999 }
2000 }
2001 if (skip_call) {
2002 return;
2003 }
2004 get_dispatch_table(ot_device_table_map, commandBuffer)
2005 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
2006 dynamicOffsetCount, pDynamicOffsets);
2007}
2008
2009VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2010 VkIndexType indexType) {
2011 bool skip_call = false;
2012 {
2013 std::lock_guard<std::mutex> lock(global_lock);
2014 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2015 skip_call |=
2016 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2017 }
2018 if (skip_call) {
2019 return;
2020 }
2021 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
2022}
2023
2024VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
2025 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
2026 bool skip_call = false;
2027 {
2028 std::lock_guard<std::mutex> lock(global_lock);
2029 skip_call |=
2030 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2031 if (pBuffers) {
2032 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
2033 skip_call |=
2034 ValidateNonDispatchableObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2035 }
2036 }
2037 }
2038 if (skip_call) {
2039 return;
2040 }
2041 get_dispatch_table(ot_device_table_map, commandBuffer)
2042 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
2043}
2044
2045VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
2046 uint32_t firstVertex, uint32_t firstInstance) {
2047 bool skip_call = false;
2048 {
2049 std::lock_guard<std::mutex> lock(global_lock);
2050 skip_call |=
2051 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2052 }
2053 if (skip_call) {
2054 return;
2055 }
2056 get_dispatch_table(ot_device_table_map, commandBuffer)
2057 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2058}
2059
2060VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2061 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2062 bool skip_call = false;
2063 {
2064 std::lock_guard<std::mutex> lock(global_lock);
2065 skip_call |=
2066 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2067 }
2068 if (skip_call) {
2069 return;
2070 }
2071 get_dispatch_table(ot_device_table_map, commandBuffer)
2072 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2073}
2074
2075VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2076 uint32_t stride) {
2077 bool skip_call = false;
2078 {
2079 std::lock_guard<std::mutex> lock(global_lock);
2080 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2081 skip_call |=
2082 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2083 }
2084 if (skip_call) {
2085 return;
2086 }
2087 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2088}
2089
2090VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2091 uint32_t drawCount, uint32_t stride) {
2092 bool skip_call = false;
2093 {
2094 std::lock_guard<std::mutex> lock(global_lock);
2095 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2096 skip_call |=
2097 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2098 }
2099 if (skip_call) {
2100 return;
2101 }
2102 get_dispatch_table(ot_device_table_map, commandBuffer)
2103 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2104}
2105
2106VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2107 bool skip_call = false;
2108 {
2109 std::lock_guard<std::mutex> lock(global_lock);
2110 skip_call |=
2111 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2112 }
2113 if (skip_call) {
2114 return;
2115 }
2116 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2117}
2118
2119VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2120 bool skip_call = false;
2121 {
2122 std::lock_guard<std::mutex> lock(global_lock);
2123 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2124 skip_call |=
2125 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2126 }
2127 if (skip_call) {
2128 return;
2129 }
2130 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2131}
2132
2133VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2134 uint32_t regionCount, const VkBufferCopy *pRegions) {
2135 bool skip_call = false;
2136 {
2137 std::lock_guard<std::mutex> lock(global_lock);
2138 skip_call |=
2139 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2140 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2141 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2142 }
2143 if (skip_call) {
2144 return;
2145 }
2146 get_dispatch_table(ot_device_table_map, commandBuffer)
2147 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2148}
2149
2150VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2151 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2152 const VkImageCopy *pRegions) {
2153 bool skip_call = false;
2154 {
2155 std::lock_guard<std::mutex> lock(global_lock);
2156 skip_call |=
2157 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2158 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2159 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2160 }
2161 if (skip_call) {
2162 return;
2163 }
2164 get_dispatch_table(ot_device_table_map, commandBuffer)
2165 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2166}
2167
2168VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2169 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2170 const VkImageBlit *pRegions, VkFilter filter) {
2171 bool skip_call = false;
2172 {
2173 std::lock_guard<std::mutex> lock(global_lock);
2174 skip_call |=
2175 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2176 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2177 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2178 }
2179 if (skip_call) {
2180 return;
2181 }
2182 get_dispatch_table(ot_device_table_map, commandBuffer)
2183 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2184}
2185
2186VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2187 VkImageLayout dstImageLayout, uint32_t regionCount,
2188 const VkBufferImageCopy *pRegions) {
2189 bool skip_call = false;
2190 {
2191 std::lock_guard<std::mutex> lock(global_lock);
2192 skip_call |=
2193 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2194 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2195 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2196 }
2197 if (skip_call) {
2198 return;
2199 }
2200 get_dispatch_table(ot_device_table_map, commandBuffer)
2201 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2202}
2203
2204VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2205 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2206 bool skip_call = false;
2207 {
2208 std::lock_guard<std::mutex> lock(global_lock);
2209 skip_call |=
2210 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2211 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2212 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2213 }
2214 if (skip_call) {
2215 return;
2216 }
2217 get_dispatch_table(ot_device_table_map, commandBuffer)
2218 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2219}
2220
2221VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2222 VkDeviceSize dataSize, const uint32_t *pData) {
2223 bool skip_call = false;
2224 {
2225 std::lock_guard<std::mutex> lock(global_lock);
2226 skip_call |=
2227 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2228 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2229 }
2230 if (skip_call) {
2231 return;
2232 }
2233 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2234}
2235
2236VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2237 VkDeviceSize size, uint32_t data) {
2238 bool skip_call = false;
2239 {
2240 std::lock_guard<std::mutex> lock(global_lock);
2241 skip_call |=
2242 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2243 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2244 }
2245 if (skip_call) {
2246 return;
2247 }
2248 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2249}
2250
2251VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2252 const VkClearColorValue *pColor, uint32_t rangeCount,
2253 const VkImageSubresourceRange *pRanges) {
2254 bool skip_call = false;
2255 {
2256 std::lock_guard<std::mutex> lock(global_lock);
2257 skip_call |=
2258 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2259 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2260 }
2261 if (skip_call) {
2262 return;
2263 }
2264 get_dispatch_table(ot_device_table_map, commandBuffer)
2265 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2266}
2267
2268VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2269 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2270 const VkImageSubresourceRange *pRanges) {
2271 bool skip_call = false;
2272 {
2273 std::lock_guard<std::mutex> lock(global_lock);
2274 skip_call |=
2275 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2276 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2277 }
2278 if (skip_call) {
2279 return;
2280 }
2281 get_dispatch_table(ot_device_table_map, commandBuffer)
2282 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2283}
2284
2285VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2286 const VkClearAttachment *pAttachments, uint32_t rectCount,
2287 const VkClearRect *pRects) {
2288 bool skip_call = false;
2289 {
2290 std::lock_guard<std::mutex> lock(global_lock);
2291 skip_call |=
2292 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2293 }
2294 if (skip_call) {
2295 return;
2296 }
2297 get_dispatch_table(ot_device_table_map, commandBuffer)
2298 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2299}
2300
2301VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2302 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2303 const VkImageResolve *pRegions) {
2304 bool skip_call = false;
2305 {
2306 std::lock_guard<std::mutex> lock(global_lock);
2307 skip_call |=
2308 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2309 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2310 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2311 }
2312 if (skip_call) {
2313 return;
2314 }
2315 get_dispatch_table(ot_device_table_map, commandBuffer)
2316 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2317}
2318
2319VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2320 bool skip_call = false;
2321 {
2322 std::lock_guard<std::mutex> lock(global_lock);
2323 skip_call |=
2324 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2325 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2326 }
2327 if (skip_call) {
2328 return;
2329 }
2330 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2331}
2332
2333VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2334 bool skip_call = false;
2335 {
2336 std::lock_guard<std::mutex> lock(global_lock);
2337 skip_call |=
2338 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2339 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2340 }
2341 if (skip_call) {
2342 return;
2343 }
2344 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2345}
2346
2347VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2348 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2349 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2350 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2351 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2352 bool skip_call = false;
2353 {
2354 std::lock_guard<std::mutex> lock(global_lock);
2355 skip_call |=
2356 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2357 if (pBufferMemoryBarriers) {
2358 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2359 if (pBufferMemoryBarriers[idx0].buffer) {
2360 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2361 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2362 }
2363 }
2364 }
2365 if (pEvents) {
2366 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2367 skip_call |=
2368 ValidateNonDispatchableObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2369 }
2370 }
2371 if (pImageMemoryBarriers) {
2372 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2373 if (pImageMemoryBarriers[idx2].image) {
2374 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2375 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2376 }
2377 }
2378 }
2379 }
2380 if (skip_call) {
2381 return;
2382 }
2383 get_dispatch_table(ot_device_table_map, commandBuffer)
2384 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2385 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2386}
2387
2388VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2389 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2390 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2391 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2392 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2393 bool skip_call = false;
2394 {
2395 std::lock_guard<std::mutex> lock(global_lock);
2396 skip_call |=
2397 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2398 if (pBufferMemoryBarriers) {
2399 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2400 if (pBufferMemoryBarriers[idx0].buffer) {
2401 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2402 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2403 }
2404 }
2405 }
2406 if (pImageMemoryBarriers) {
2407 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2408 if (pImageMemoryBarriers[idx1].image) {
2409 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2410 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2411 }
2412 }
2413 }
2414 }
2415 if (skip_call) {
2416 return;
2417 }
2418 get_dispatch_table(ot_device_table_map, commandBuffer)
2419 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2420 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2421}
2422
2423VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2424 VkQueryControlFlags flags) {
2425 bool skip_call = false;
2426 {
2427 std::lock_guard<std::mutex> lock(global_lock);
2428 skip_call |=
2429 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2430 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2431 }
2432 if (skip_call) {
2433 return;
2434 }
2435 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2436}
2437
2438VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2439 bool skip_call = false;
2440 {
2441 std::lock_guard<std::mutex> lock(global_lock);
2442 skip_call |=
2443 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2444 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2445 }
2446 if (skip_call) {
2447 return;
2448 }
2449 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2450}
2451
2452VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2453 uint32_t queryCount) {
2454 bool skip_call = false;
2455 {
2456 std::lock_guard<std::mutex> lock(global_lock);
2457 skip_call |=
2458 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2459 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2460 }
2461 if (skip_call) {
2462 return;
2463 }
2464 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2465}
2466
2467VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2468 VkQueryPool queryPool, uint32_t query) {
2469 bool skip_call = false;
2470 {
2471 std::lock_guard<std::mutex> lock(global_lock);
2472 skip_call |=
2473 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2474 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2475 }
2476 if (skip_call) {
2477 return;
2478 }
2479 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2480}
2481
2482VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2483 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2484 VkDeviceSize stride, VkQueryResultFlags flags) {
2485 bool skip_call = false;
2486 {
2487 std::lock_guard<std::mutex> lock(global_lock);
2488 skip_call |=
2489 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2490 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2491 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2492 }
2493 if (skip_call) {
2494 return;
2495 }
2496 get_dispatch_table(ot_device_table_map, commandBuffer)
2497 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2498}
2499
2500VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2501 uint32_t offset, uint32_t size, const void *pValues) {
2502 bool skip_call = false;
2503 {
2504 std::lock_guard<std::mutex> lock(global_lock);
2505 skip_call |=
2506 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2507 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
2508 }
2509 if (skip_call) {
2510 return;
2511 }
2512 get_dispatch_table(ot_device_table_map, commandBuffer)
2513 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2514}
2515
2516VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2517 VkSubpassContents contents) {
2518 bool skip_call = false;
2519 {
2520 std::lock_guard<std::mutex> lock(global_lock);
2521 skip_call |=
2522 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2523 if (pRenderPassBegin) {
2524 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->framebuffer,
2525 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2526 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->renderPass,
2527 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2528 }
2529 }
2530 if (skip_call) {
2531 return;
2532 }
2533 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2534}
2535
2536VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2537 bool skip_call = false;
2538 {
2539 std::lock_guard<std::mutex> lock(global_lock);
2540 skip_call |=
2541 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2542 }
2543 if (skip_call) {
2544 return;
2545 }
2546 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2547}
2548
2549VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2550 bool skip_call = false;
2551 {
2552 std::lock_guard<std::mutex> lock(global_lock);
2553 skip_call |=
2554 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2555 }
2556 if (skip_call) {
2557 return;
2558 }
2559 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2560}
2561
2562VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2563 const VkCommandBuffer *pCommandBuffers) {
2564 bool skip_call = false;
2565 {
2566 std::lock_guard<std::mutex> lock(global_lock);
2567 skip_call |=
2568 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2569 if (pCommandBuffers) {
2570 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2571 skip_call |= ValidateDispatchableObject(commandBuffer, pCommandBuffers[idx0],
2572 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2573 }
2574 }
2575 }
2576 if (skip_call) {
2577 return;
2578 }
2579 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2580}
2581
2582VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2583 bool skip_call = false;
2584 {
2585 std::lock_guard<std::mutex> lock(global_lock);
2586 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2587 skip_call |= ValidateNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2588 }
2589 if (skip_call) {
2590 return;
2591 }
2592 {
2593 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes3e51a202016-09-29 14:35:09 +13002594 DestroyNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002595 }
2596 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2597}
2598
2599VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2600 VkSurfaceKHR surface, VkBool32 *pSupported) {
2601 bool skip_call = false;
2602 {
2603 std::lock_guard<std::mutex> lock(global_lock);
2604 skip_call |=
2605 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2606 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2607 }
2608 if (skip_call) {
2609 return VK_ERROR_VALIDATION_FAILED_EXT;
2610 }
2611 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2612 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2613 return result;
2614}
2615
2616VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2617 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2618 bool skip_call = false;
2619 {
2620 std::lock_guard<std::mutex> lock(global_lock);
2621 skip_call |=
2622 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2623 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2624 }
2625 if (skip_call) {
2626 return VK_ERROR_VALIDATION_FAILED_EXT;
2627 }
2628 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2629 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2630 return result;
2631}
2632
2633VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2634 uint32_t *pSurfaceFormatCount,
2635 VkSurfaceFormatKHR *pSurfaceFormats) {
2636 bool skip_call = false;
2637 {
2638 std::lock_guard<std::mutex> lock(global_lock);
2639 skip_call |=
2640 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2641 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2642 }
2643 if (skip_call) {
2644 return VK_ERROR_VALIDATION_FAILED_EXT;
2645 }
2646 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2647 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2648 return result;
2649}
2650
2651VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2652 uint32_t *pPresentModeCount,
2653 VkPresentModeKHR *pPresentModes) {
2654 bool skip_call = false;
2655 {
2656 std::lock_guard<std::mutex> lock(global_lock);
2657 skip_call |=
2658 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2659 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2660 }
2661 if (skip_call) {
2662 return VK_ERROR_VALIDATION_FAILED_EXT;
2663 }
2664 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2665 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2666 return result;
2667}
2668
2669VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2670 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2671 bool skip_call = false;
2672 {
2673 std::lock_guard<std::mutex> lock(global_lock);
2674 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2675 if (pCreateInfo) {
2676 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->oldSwapchain,
2677 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2678 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2679 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfo->surface,
2680 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2681 }
2682 }
2683 if (skip_call) {
2684 return VK_ERROR_VALIDATION_FAILED_EXT;
2685 }
2686 VkResult result =
2687 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2688 {
2689 std::lock_guard<std::mutex> lock(global_lock);
2690 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002691 CreateNonDispatchableObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002692 }
2693 }
2694 return result;
2695}
2696
2697VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2698 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2699 bool skip_call = false;
2700 {
2701 std::lock_guard<std::mutex> lock(global_lock);
2702 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2703 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2704 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2705 skip_call |= ValidateNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2706 }
2707 if (skip_call) {
2708 return VK_ERROR_VALIDATION_FAILED_EXT;
2709 }
2710 VkResult result = get_dispatch_table(ot_device_table_map, device)
2711 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2712 return result;
2713}
2714
2715VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2716 bool skip_call = false;
2717 {
2718 std::lock_guard<std::mutex> lock(global_lock);
2719 if (pPresentInfo) {
2720 if (pPresentInfo->pSwapchains) {
2721 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2722 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pSwapchains[idx0],
2723 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2724 }
2725 }
2726 if (pPresentInfo->pWaitSemaphores) {
2727 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2728 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2729 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2730 }
2731 }
2732 }
2733 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2734 }
2735 if (skip_call) {
2736 return VK_ERROR_VALIDATION_FAILED_EXT;
2737 }
2738 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2739 return result;
2740}
2741
2742#ifdef VK_USE_PLATFORM_WIN32_KHR
2743VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2744 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2745 bool skip_call = false;
2746 {
2747 std::lock_guard<std::mutex> lock(global_lock);
2748 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2749 }
2750 if (skip_call) {
2751 return VK_ERROR_VALIDATION_FAILED_EXT;
2752 }
2753 VkResult result =
2754 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2755 {
2756 std::lock_guard<std::mutex> lock(global_lock);
2757 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002758 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002759 }
2760 }
2761 return result;
2762}
2763
2764VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2765 uint32_t queueFamilyIndex) {
2766 bool skip_call = false;
2767 {
2768 std::lock_guard<std::mutex> lock(global_lock);
2769 skip_call |=
2770 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2771 }
2772 if (skip_call) {
2773 return VK_FALSE;
2774 }
2775 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2776 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2777 return result;
2778}
2779#endif // VK_USE_PLATFORM_WIN32_KHR
2780
2781#ifdef VK_USE_PLATFORM_XCB_KHR
2782VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2783 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2784 bool skip_call = false;
2785 {
2786 std::lock_guard<std::mutex> lock(global_lock);
2787 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2788 }
2789 if (skip_call) {
2790 return VK_ERROR_VALIDATION_FAILED_EXT;
2791 }
2792 VkResult result =
2793 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2794 {
2795 std::lock_guard<std::mutex> lock(global_lock);
2796 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002797 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002798 }
2799 }
2800 return result;
2801}
2802
2803VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2804 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2805 xcb_visualid_t visual_id) {
2806 bool skip_call = false;
2807 {
2808 std::lock_guard<std::mutex> lock(global_lock);
2809 skip_call |=
2810 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2811 }
2812 if (skip_call) {
2813 return VK_FALSE;
2814 }
2815 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2816 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2817 return result;
2818}
2819#endif // VK_USE_PLATFORM_XCB_KHR
2820
2821#ifdef VK_USE_PLATFORM_XLIB_KHR
2822VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2823 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2824 bool skip_call = false;
2825 {
2826 std::lock_guard<std::mutex> lock(global_lock);
2827 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2828 }
2829 if (skip_call) {
2830 return VK_ERROR_VALIDATION_FAILED_EXT;
2831 }
2832 VkResult result =
2833 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2834 {
2835 std::lock_guard<std::mutex> lock(global_lock);
2836 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002837 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002838 }
2839 }
2840 return result;
2841}
2842
2843VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2844 uint32_t queueFamilyIndex, Display *dpy,
2845 VisualID visualID) {
2846 bool skip_call = false;
2847 {
2848 std::lock_guard<std::mutex> lock(global_lock);
2849 skip_call |=
2850 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2851 }
2852 if (skip_call) {
2853 return VK_FALSE;
2854 }
2855 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2856 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2857 return result;
2858}
2859#endif // VK_USE_PLATFORM_XLIB_KHR
2860
2861#ifdef VK_USE_PLATFORM_MIR_KHR
2862VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2863 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2864 bool skip_call = false;
2865 {
2866 std::lock_guard<std::mutex> lock(global_lock);
2867 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2868 }
2869 if (skip_call) {
2870 return VK_ERROR_VALIDATION_FAILED_EXT;
2871 }
2872 VkResult result =
2873 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2874 {
2875 std::lock_guard<std::mutex> lock(global_lock);
2876 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002877 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002878 }
2879 }
2880 return result;
2881}
2882
2883VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2884 uint32_t queueFamilyIndex, MirConnection *connection) {
2885 bool skip_call = false;
2886 {
2887 std::lock_guard<std::mutex> lock(global_lock);
2888 skip_call |=
2889 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2890 }
2891 if (skip_call) {
2892 return VK_FALSE;
2893 }
2894 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2895 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2896 return result;
2897}
2898#endif // VK_USE_PLATFORM_MIR_KHR
2899
2900#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2901VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2902 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2903 bool skip_call = false;
2904 {
2905 std::lock_guard<std::mutex> lock(global_lock);
2906 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2907 }
2908 if (skip_call) {
2909 return VK_ERROR_VALIDATION_FAILED_EXT;
2910 }
2911 VkResult result =
2912 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2913 {
2914 std::lock_guard<std::mutex> lock(global_lock);
2915 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002916 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002917 }
2918 }
2919 return result;
2920}
2921
2922VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2923 uint32_t queueFamilyIndex,
2924 struct wl_display *display) {
2925 bool skip_call = false;
2926 {
2927 std::lock_guard<std::mutex> lock(global_lock);
2928 skip_call |=
2929 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2930 }
2931 if (skip_call) {
2932 return VK_FALSE;
2933 }
2934 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2935 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2936 return result;
2937}
2938#endif // VK_USE_PLATFORM_WAYLAND_KHR
2939
2940#ifdef VK_USE_PLATFORM_ANDROID_KHR
2941VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2942 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2943 bool skip_call = false;
2944 {
2945 std::lock_guard<std::mutex> lock(global_lock);
2946 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2947 }
2948 if (skip_call) {
2949 return VK_ERROR_VALIDATION_FAILED_EXT;
2950 }
2951 VkResult result =
2952 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2953 {
2954 std::lock_guard<std::mutex> lock(global_lock);
2955 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002956 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002957 }
2958 }
2959 return result;
2960}
2961#endif // VK_USE_PLATFORM_ANDROID_KHR
2962
Mark Youngead9b932016-09-08 12:28:38 -06002963VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
2964 const VkSwapchainCreateInfoKHR *pCreateInfos,
2965 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
2966 bool skip_call = false;
2967 uint32_t i = 0;
2968 {
2969 std::lock_guard<std::mutex> lock(global_lock);
2970 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2971 if (NULL != pCreateInfos) {
2972 for (i = 0; i < swapchainCount; i++) {
2973 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[i].oldSwapchain,
2974 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2975 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2976 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfos[i].surface,
2977 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2978 }
2979 }
2980 }
2981 if (skip_call) {
2982 return VK_ERROR_VALIDATION_FAILED_EXT;
2983 }
2984 VkResult result =
2985 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
2986 {
2987 std::lock_guard<std::mutex> lock(global_lock);
2988 if (result == VK_SUCCESS) {
2989 for (i = 0; i < swapchainCount; i++) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13002990 CreateNonDispatchableObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Youngead9b932016-09-08 12:28:38 -06002991 }
2992 }
2993 }
2994 return result;
2995}
2996
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002997VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2998 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2999 const VkAllocationCallbacks *pAllocator,
3000 VkDebugReportCallbackEXT *pCallback) {
3001 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
3002 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
3003 if (VK_SUCCESS == result) {
3004 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3005 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
Chris Forbesdbfe96a2016-09-29 13:51:10 +13003006 CreateNonDispatchableObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003007 }
3008 return result;
3009}
3010
// Destroy a debug report callback: forward the destroy down the chain first,
// then unregister it from this layer's logging machinery, and finally remove
// it from the object tracker.
VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                         const VkAllocationCallbacks *pAllocator) {
    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
    pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
    DestroyNonDispatchableObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
}
3019
3020VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
3021 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
3022 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
3023 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
3024 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
3025}
3026
// Instance extensions this layer itself implements (reported when the loader
// queries the layer by name).
static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};

// Properties reported for VK_LAYER_LUNARG_object_tracker by the Enumerate*LayerProperties entry points.
static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
                                                   VK_LAYER_API_VERSION, // specVersion
                                                   1, // implementationVersion
                                                   "LunarG Validation Layer"};
3033
// Report this layer's single set of instance-layer properties.
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}
3037
// Report the same layer properties at device scope (physicalDevice is unused).
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}
3042
3043VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
3044 VkExtensionProperties *pProperties) {
3045 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3046 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
3047
3048 return VK_ERROR_LAYER_NOT_PRESENT;
3049}
3050
3051VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
3052 uint32_t *pCount, VkExtensionProperties *pProperties) {
3053 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3054 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
3055
3056 assert(physicalDevice);
3057 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3058 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3059}
3060
3061static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3062 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3063 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3064}
3065
// Resolve WSI entry points implemented by this layer, but only for commands
// whose corresponding extension was actually enabled on this instance.
// Returns nullptr for anything not handled here so the caller can continue
// down its resolution chain.
static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
    VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
    // Bail out early if no instance has registered extensions yet, or if
    // VK_KHR_surface was not enabled on this instance.
    if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
        return nullptr;

    // Core VK_KHR_surface commands.
    if (!strcmp("vkDestroySurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
    if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
    if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
    if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
    if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);

    // Platform-specific surface commands, each gated on both the build-time
    // platform define and the run-time enablement of its extension.
#ifdef VK_USE_PLATFORM_WIN32_KHR
    if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
    if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
    if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
    if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
    if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
    if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
    if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
    if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
    if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
    if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
        return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
#endif // VK_USE_PLATFORM_ANDROID_KHR

    return nullptr;
}
3119
3120static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3121 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3122 device_data->wsi_enabled = false;
Mark Youngead9b932016-09-08 12:28:38 -06003123 device_data->wsi_display_swapchain_enabled = false;
3124 device_data->objtrack_extensions_enabled = false;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003125
3126 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3127 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3128 device_data->wsi_enabled = true;
3129 }
Mark Youngead9b932016-09-08 12:28:38 -06003130 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) {
3131 device_data->wsi_display_swapchain_enabled = true;
3132 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003133 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3134 device_data->objtrack_extensions_enabled = true;
3135 }
3136 }
3137}
3138
3139static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3140 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3141
3142
3143 instanceExtMap[pDisp] = {};
3144
3145 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3146 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3147 instanceExtMap[pDisp].wsi_enabled = true;
3148 }
3149#ifdef VK_USE_PLATFORM_XLIB_KHR
3150 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3151 instanceExtMap[pDisp].xlib_enabled = true;
3152 }
3153#endif
3154#ifdef VK_USE_PLATFORM_XCB_KHR
3155 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3156 instanceExtMap[pDisp].xcb_enabled = true;
3157 }
3158#endif
3159#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3160 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3161 instanceExtMap[pDisp].wayland_enabled = true;
3162 }
3163#endif
3164#ifdef VK_USE_PLATFORM_MIR_KHR
3165 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3166 instanceExtMap[pDisp].mir_enabled = true;
3167 }
3168#endif
3169#ifdef VK_USE_PLATFORM_ANDROID_KHR
3170 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3171 instanceExtMap[pDisp].android_enabled = true;
3172 }
3173#endif
3174#ifdef VK_USE_PLATFORM_WIN32_KHR
3175 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3176 instanceExtMap[pDisp].win32_enabled = true;
3177 }
3178#endif
3179 }
3180}
3181
// Intercept vkCreateDevice: unwrap the layer chain info to find the next
// layer's entry points, create the device downstream, then set up this
// layer's per-device state and begin tracking the new VkDevice.
// NOTE: the chain_info manipulation below is order-sensitive — the link info
// must be advanced before fpCreateDevice is called so the next layer sees
// its own entry in the chain.
VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    std::lock_guard<std::mutex> lock(global_lock);
    layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    // Per-device layer state: hook the new device into this layer's reporting.
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
    device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);

    // Add link back to physDev
    device_data->physical_device = physicalDevice;

    initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);

    // Record enabled extensions, then start tracking the device handle itself.
    CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
    CreateDispatchableObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);

    return result;
}
3217
3218VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3219 uint32_t *pQueueFamilyPropertyCount,
3220 VkQueueFamilyProperties *pQueueFamilyProperties) {
3221 get_dispatch_table(ot_instance_table_map, physicalDevice)
3222 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3223 std::lock_guard<std::mutex> lock(global_lock);
3224 if (pQueueFamilyProperties != NULL) {
3225 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3226 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3227 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3228 }
3229 }
3230}
3231
// Intercept vkCreateInstance: unwrap the layer chain info, create the
// instance downstream, then bootstrap all of this layer's per-instance state
// (dispatch table, debug reporting, extension flags, object tracking).
// NOTE: initialization order below is deliberate — the dispatch table must
// exist before debug_report_create_instance, and extension registration must
// precede tracking of the instance handle.
VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
    instance_data->instance = *pInstance;
    initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);

    // Look for one or more debug report create info structures, and copy the
    // callback(s) for each one found (for use by vkDestroyInstance)
    layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
                             &instance_data->tmp_callbacks);

    instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
                                                              pCreateInfo->ppEnabledExtensionNames);

    // Read layer settings (log actions, etc.), record enabled WSI extensions,
    // then start tracking the instance handle itself.
    InitObjectTracker(instance_data, pAllocator);
    CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);

    CreateDispatchableObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);

    return result;
}
3271
3272VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3273 VkPhysicalDevice *pPhysicalDevices) {
3274 bool skip_call = VK_FALSE;
3275 std::unique_lock<std::mutex> lock(global_lock);
3276 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3277 lock.unlock();
3278 if (skip_call) {
3279 return VK_ERROR_VALIDATION_FAILED_EXT;
3280 }
3281 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3282 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3283 lock.lock();
3284 if (result == VK_SUCCESS) {
3285 if (pPhysicalDevices) {
3286 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13003287 CreateDispatchableObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003288 }
3289 }
3290 }
3291 lock.unlock();
3292 return result;
3293}
3294
3295VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3296 std::unique_lock<std::mutex> lock(global_lock);
3297 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3298 lock.unlock();
3299
3300 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3301
3302 lock.lock();
3303
3304 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3305 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3306}
3307
3308VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3309 std::unique_lock<std::mutex> lock(global_lock);
3310 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3311 lock.unlock();
3312
3313 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3314
3315 lock.lock();
Chris Forbes3e51a202016-09-29 14:35:09 +13003316 DestroyNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003317}
3318
3319VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3320 VkMemoryMapFlags flags, void **ppData) {
3321 bool skip_call = VK_FALSE;
3322 std::unique_lock<std::mutex> lock(global_lock);
3323 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3324 lock.unlock();
3325 if (skip_call == VK_TRUE) {
3326 return VK_ERROR_VALIDATION_FAILED_EXT;
3327 }
3328 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3329 return result;
3330}
3331
3332VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3333 bool skip_call = VK_FALSE;
3334 std::unique_lock<std::mutex> lock(global_lock);
3335 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3336 lock.unlock();
3337 if (skip_call == VK_TRUE) {
3338 return;
3339 }
3340
3341 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3342}
3343VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3344 VkFence fence) {
3345 std::unique_lock<std::mutex> lock(global_lock);
3346 ValidateQueueFlags(queue, "QueueBindSparse");
3347
3348 for (uint32_t i = 0; i < bindInfoCount; i++) {
3349 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
3350 ValidateNonDispatchableObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3351 false);
3352 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
3353 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3354 false);
3355 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
3356 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
3357 }
3358 lock.unlock();
3359
3360 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3361 return result;
3362}
3363
3364VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3365 VkCommandBuffer *pCommandBuffers) {
3366 bool skip_call = VK_FALSE;
3367 std::unique_lock<std::mutex> lock(global_lock);
3368 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3369 skip_call |=
3370 ValidateNonDispatchableObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3371 lock.unlock();
3372
3373 if (skip_call) {
3374 return VK_ERROR_VALIDATION_FAILED_EXT;
3375 }
3376
3377 VkResult result =
3378 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3379
3380 lock.lock();
3381 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3382 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3383 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3384 }
3385 lock.unlock();
3386
3387 return result;
3388}
3389
3390VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3391 VkDescriptorSet *pDescriptorSets) {
3392 bool skip_call = VK_FALSE;
3393 std::unique_lock<std::mutex> lock(global_lock);
3394 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3395 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->descriptorPool,
3396 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3397 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3398 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->pSetLayouts[i],
3399 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3400 }
3401 lock.unlock();
3402 if (skip_call) {
3403 return VK_ERROR_VALIDATION_FAILED_EXT;
3404 }
3405
3406 VkResult result =
3407 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3408
3409 if (VK_SUCCESS == result) {
3410 lock.lock();
3411 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3412 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3413 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3414 }
3415 lock.unlock();
3416 }
3417
3418 return result;
3419}
3420
3421VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3422 const VkCommandBuffer *pCommandBuffers) {
3423 bool skip_call = false;
3424 std::unique_lock<std::mutex> lock(global_lock);
3425 ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3426 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3427 for (uint32_t i = 0; i < commandBufferCount; i++) {
3428 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3429 }
3430
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003431 for (uint32_t i = 0; i < commandBufferCount; i++) {
Chris Forbes3e51a202016-09-29 14:35:09 +13003432 DestroyDispatchableObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003433 }
3434
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003435 lock.unlock();
3436 if (!skip_call) {
3437 get_dispatch_table(ot_device_table_map, device)
3438 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3439 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003440}
3441VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3442 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3443 std::unique_lock<std::mutex> lock(global_lock);
3444 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3445 // Remove this swapchain's images from our map of such images.
3446 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3447 while (itr != device_data->swapchainImageMap.end()) {
3448 OBJTRACK_NODE *pNode = (*itr).second;
3449 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3450 delete pNode;
3451 auto delete_item = itr++;
3452 device_data->swapchainImageMap.erase(delete_item);
3453 } else {
3454 ++itr;
3455 }
3456 }
Chris Forbes3e51a202016-09-29 14:35:09 +13003457 DestroyNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003458 lock.unlock();
3459
3460 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3461}
3462
3463VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3464 const VkDescriptorSet *pDescriptorSets) {
3465 bool skip_call = false;
3466 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3467 std::unique_lock<std::mutex> lock(global_lock);
3468 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3469 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3470 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3471 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3472 }
3473
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003474 for (uint32_t i = 0; i < descriptorSetCount; i++) {
Chris Forbes3e51a202016-09-29 14:35:09 +13003475 DestroyNonDispatchableObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003476 }
3477
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003478 lock.unlock();
3479 if (!skip_call) {
3480 result = get_dispatch_table(ot_device_table_map, device)
3481 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3482 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003483 return result;
3484}
3485
// Destroy a descriptor pool: validate handles, untrack the pool's implicitly
// freed descriptor sets, untrack the pool itself, then forward down the chain.
VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
                                                 const VkAllocationCallbacks *pAllocator) {
    bool skip_call = VK_FALSE;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
    lock.unlock();
    if (skip_call) {
        // Invalid handle(s): skip both the bookkeeping and the down-chain call.
        return;
    }
    // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
    // Remove this pool's descriptor sets from our descriptorSet map.
    lock.lock();
    std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
    while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = (*itr).second;
        // Advance before the destroy call: DestroyNonDispatchableObject
        // presumably erases this map entry, which would invalidate itr if it
        // still pointed at it — TODO(review): confirm against the helper.
        auto del_itr = itr++;
        if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
            DestroyNonDispatchableObject(device, (VkDescriptorSet)((*del_itr).first),
                                         VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
        }
    }
    DestroyNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
    lock.unlock();
    get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
}
3514
// Destroy a command pool: validate handles, untrack the pool's implicitly
// freed command buffers, untrack the pool itself, then forward down the chain.
VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
    lock.unlock();
    if (skip_call) {
        // Invalid handle(s): skip both the bookkeeping and the down-chain call.
        return;
    }
    lock.lock();
    // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
    // Remove this pool's cmdBuffers from our cmd buffer map.
    auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
    auto del_itr = itr;
    while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
        OBJTRACK_NODE *pNode = (*itr).second;
        // Advance before the destroy call: DestroyDispatchableObject presumably
        // erases the del_itr entry — TODO(review): confirm against the helper.
        del_itr = itr++;
        if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
            // NOTE(review): skip_call accumulated here is never read again —
            // ValidateCommandBuffer is effectively called only for its report
            // side effect. Confirm this is intended.
            skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
            DestroyDispatchableObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
                                      VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
        }
    }
    DestroyNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
    lock.unlock();
    get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
}
3543
3544VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
3545 VkImage *pSwapchainImages) {
3546 bool skip_call = VK_FALSE;
3547 std::unique_lock<std::mutex> lock(global_lock);
3548 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3549 lock.unlock();
3550 if (skip_call) {
3551 return VK_ERROR_VALIDATION_FAILED_EXT;
3552 }
3553 VkResult result = get_dispatch_table(ot_device_table_map, device)
3554 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
3555 if (pSwapchainImages != NULL) {
3556 lock.lock();
3557 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
3558 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
3559 }
3560 lock.unlock();
3561 }
3562 return result;
3563}
3564
3565VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3566 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3567 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3568 bool skip_call = VK_FALSE;
3569 std::unique_lock<std::mutex> lock(global_lock);
3570 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3571 if (pCreateInfos) {
3572 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3573 if (pCreateInfos[idx0].basePipelineHandle) {
3574 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3575 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3576 }
3577 if (pCreateInfos[idx0].layout) {
3578 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3579 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3580 }
3581 if (pCreateInfos[idx0].pStages) {
3582 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
3583 if (pCreateInfos[idx0].pStages[idx1].module) {
3584 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].pStages[idx1].module,
3585 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3586 }
3587 }
3588 }
3589 if (pCreateInfos[idx0].renderPass) {
3590 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].renderPass,
3591 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
3592 }
3593 }
3594 }
3595 if (pipelineCache) {
3596 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3597 }
3598 lock.unlock();
3599 if (skip_call) {
3600 return VK_ERROR_VALIDATION_FAILED_EXT;
3601 }
3602 VkResult result = get_dispatch_table(ot_device_table_map, device)
3603 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3604 lock.lock();
3605 if (result == VK_SUCCESS) {
3606 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13003607 CreateNonDispatchableObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003608 }
3609 }
3610 lock.unlock();
3611 return result;
3612}
3613
3614VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3615 const VkComputePipelineCreateInfo *pCreateInfos,
3616 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3617 bool skip_call = VK_FALSE;
3618 std::unique_lock<std::mutex> lock(global_lock);
3619 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3620 if (pCreateInfos) {
3621 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3622 if (pCreateInfos[idx0].basePipelineHandle) {
3623 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3624 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3625 }
3626 if (pCreateInfos[idx0].layout) {
3627 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3628 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3629 }
3630 if (pCreateInfos[idx0].stage.module) {
3631 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].stage.module,
3632 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3633 }
3634 }
3635 }
3636 if (pipelineCache) {
3637 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3638 }
3639 lock.unlock();
3640 if (skip_call) {
3641 return VK_ERROR_VALIDATION_FAILED_EXT;
3642 }
3643 VkResult result = get_dispatch_table(ot_device_table_map, device)
3644 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3645 lock.lock();
3646 if (result == VK_SUCCESS) {
3647 for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13003648 CreateNonDispatchableObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003649 }
3650 }
3651 lock.unlock();
3652 return result;
3653}
3654
3655static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
3656 if (!name || name[0] != 'v' || name[1] != 'k')
3657 return NULL;
3658
3659 name += 2;
3660 if (!strcmp(name, "GetDeviceProcAddr"))
3661 return (PFN_vkVoidFunction)GetDeviceProcAddr;
3662 if (!strcmp(name, "DestroyDevice"))
3663 return (PFN_vkVoidFunction)DestroyDevice;
3664 if (!strcmp(name, "GetDeviceQueue"))
3665 return (PFN_vkVoidFunction)GetDeviceQueue;
3666 if (!strcmp(name, "QueueSubmit"))
3667 return (PFN_vkVoidFunction)QueueSubmit;
3668 if (!strcmp(name, "QueueWaitIdle"))
3669 return (PFN_vkVoidFunction)QueueWaitIdle;
3670 if (!strcmp(name, "DeviceWaitIdle"))
3671 return (PFN_vkVoidFunction)DeviceWaitIdle;
3672 if (!strcmp(name, "AllocateMemory"))
3673 return (PFN_vkVoidFunction)AllocateMemory;
3674 if (!strcmp(name, "FreeMemory"))
3675 return (PFN_vkVoidFunction)FreeMemory;
3676 if (!strcmp(name, "MapMemory"))
3677 return (PFN_vkVoidFunction)MapMemory;
3678 if (!strcmp(name, "UnmapMemory"))
3679 return (PFN_vkVoidFunction)UnmapMemory;
3680 if (!strcmp(name, "FlushMappedMemoryRanges"))
3681 return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
3682 if (!strcmp(name, "InvalidateMappedMemoryRanges"))
3683 return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
3684 if (!strcmp(name, "GetDeviceMemoryCommitment"))
3685 return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
3686 if (!strcmp(name, "BindBufferMemory"))
3687 return (PFN_vkVoidFunction)BindBufferMemory;
3688 if (!strcmp(name, "BindImageMemory"))
3689 return (PFN_vkVoidFunction)BindImageMemory;
3690 if (!strcmp(name, "GetBufferMemoryRequirements"))
3691 return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
3692 if (!strcmp(name, "GetImageMemoryRequirements"))
3693 return (PFN_vkVoidFunction)GetImageMemoryRequirements;
3694 if (!strcmp(name, "GetImageSparseMemoryRequirements"))
3695 return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
3696 if (!strcmp(name, "QueueBindSparse"))
3697 return (PFN_vkVoidFunction)QueueBindSparse;
3698 if (!strcmp(name, "CreateFence"))
3699 return (PFN_vkVoidFunction)CreateFence;
3700 if (!strcmp(name, "DestroyFence"))
3701 return (PFN_vkVoidFunction)DestroyFence;
3702 if (!strcmp(name, "ResetFences"))
3703 return (PFN_vkVoidFunction)ResetFences;
3704 if (!strcmp(name, "GetFenceStatus"))
3705 return (PFN_vkVoidFunction)GetFenceStatus;
3706 if (!strcmp(name, "WaitForFences"))
3707 return (PFN_vkVoidFunction)WaitForFences;
3708 if (!strcmp(name, "CreateSemaphore"))
3709 return (PFN_vkVoidFunction)CreateSemaphore;
3710 if (!strcmp(name, "DestroySemaphore"))
3711 return (PFN_vkVoidFunction)DestroySemaphore;
3712 if (!strcmp(name, "CreateEvent"))
3713 return (PFN_vkVoidFunction)CreateEvent;
3714 if (!strcmp(name, "DestroyEvent"))
3715 return (PFN_vkVoidFunction)DestroyEvent;
3716 if (!strcmp(name, "GetEventStatus"))
3717 return (PFN_vkVoidFunction)GetEventStatus;
3718 if (!strcmp(name, "SetEvent"))
3719 return (PFN_vkVoidFunction)SetEvent;
3720 if (!strcmp(name, "ResetEvent"))
3721 return (PFN_vkVoidFunction)ResetEvent;
3722 if (!strcmp(name, "CreateQueryPool"))
3723 return (PFN_vkVoidFunction)CreateQueryPool;
3724 if (!strcmp(name, "DestroyQueryPool"))
3725 return (PFN_vkVoidFunction)DestroyQueryPool;
3726 if (!strcmp(name, "GetQueryPoolResults"))
3727 return (PFN_vkVoidFunction)GetQueryPoolResults;
3728 if (!strcmp(name, "CreateBuffer"))
3729 return (PFN_vkVoidFunction)CreateBuffer;
3730 if (!strcmp(name, "DestroyBuffer"))
3731 return (PFN_vkVoidFunction)DestroyBuffer;
3732 if (!strcmp(name, "CreateBufferView"))
3733 return (PFN_vkVoidFunction)CreateBufferView;
3734 if (!strcmp(name, "DestroyBufferView"))
3735 return (PFN_vkVoidFunction)DestroyBufferView;
3736 if (!strcmp(name, "CreateImage"))
3737 return (PFN_vkVoidFunction)CreateImage;
3738 if (!strcmp(name, "DestroyImage"))
3739 return (PFN_vkVoidFunction)DestroyImage;
3740 if (!strcmp(name, "GetImageSubresourceLayout"))
3741 return (PFN_vkVoidFunction)GetImageSubresourceLayout;
3742 if (!strcmp(name, "CreateImageView"))
3743 return (PFN_vkVoidFunction)CreateImageView;
3744 if (!strcmp(name, "DestroyImageView"))
3745 return (PFN_vkVoidFunction)DestroyImageView;
3746 if (!strcmp(name, "CreateShaderModule"))
3747 return (PFN_vkVoidFunction)CreateShaderModule;
3748 if (!strcmp(name, "DestroyShaderModule"))
3749 return (PFN_vkVoidFunction)DestroyShaderModule;
3750 if (!strcmp(name, "CreatePipelineCache"))
3751 return (PFN_vkVoidFunction)CreatePipelineCache;
3752 if (!strcmp(name, "DestroyPipelineCache"))
3753 return (PFN_vkVoidFunction)DestroyPipelineCache;
3754 if (!strcmp(name, "GetPipelineCacheData"))
3755 return (PFN_vkVoidFunction)GetPipelineCacheData;
3756 if (!strcmp(name, "MergePipelineCaches"))
3757 return (PFN_vkVoidFunction)MergePipelineCaches;
3758 if (!strcmp(name, "CreateGraphicsPipelines"))
3759 return (PFN_vkVoidFunction)CreateGraphicsPipelines;
3760 if (!strcmp(name, "CreateComputePipelines"))
3761 return (PFN_vkVoidFunction)CreateComputePipelines;
3762 if (!strcmp(name, "DestroyPipeline"))
3763 return (PFN_vkVoidFunction)DestroyPipeline;
3764 if (!strcmp(name, "CreatePipelineLayout"))
3765 return (PFN_vkVoidFunction)CreatePipelineLayout;
3766 if (!strcmp(name, "DestroyPipelineLayout"))
3767 return (PFN_vkVoidFunction)DestroyPipelineLayout;
3768 if (!strcmp(name, "CreateSampler"))
3769 return (PFN_vkVoidFunction)CreateSampler;
3770 if (!strcmp(name, "DestroySampler"))
3771 return (PFN_vkVoidFunction)DestroySampler;
3772 if (!strcmp(name, "CreateDescriptorSetLayout"))
3773 return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
3774 if (!strcmp(name, "DestroyDescriptorSetLayout"))
3775 return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
3776 if (!strcmp(name, "CreateDescriptorPool"))
3777 return (PFN_vkVoidFunction)CreateDescriptorPool;
3778 if (!strcmp(name, "DestroyDescriptorPool"))
3779 return (PFN_vkVoidFunction)DestroyDescriptorPool;
3780 if (!strcmp(name, "ResetDescriptorPool"))
3781 return (PFN_vkVoidFunction)ResetDescriptorPool;
3782 if (!strcmp(name, "AllocateDescriptorSets"))
3783 return (PFN_vkVoidFunction)AllocateDescriptorSets;
3784 if (!strcmp(name, "FreeDescriptorSets"))
3785 return (PFN_vkVoidFunction)FreeDescriptorSets;
3786 if (!strcmp(name, "UpdateDescriptorSets"))
3787 return (PFN_vkVoidFunction)UpdateDescriptorSets;
3788 if (!strcmp(name, "CreateFramebuffer"))
3789 return (PFN_vkVoidFunction)CreateFramebuffer;
3790 if (!strcmp(name, "DestroyFramebuffer"))
3791 return (PFN_vkVoidFunction)DestroyFramebuffer;
3792 if (!strcmp(name, "CreateRenderPass"))
3793 return (PFN_vkVoidFunction)CreateRenderPass;
3794 if (!strcmp(name, "DestroyRenderPass"))
3795 return (PFN_vkVoidFunction)DestroyRenderPass;
3796 if (!strcmp(name, "GetRenderAreaGranularity"))
3797 return (PFN_vkVoidFunction)GetRenderAreaGranularity;
3798 if (!strcmp(name, "CreateCommandPool"))
3799 return (PFN_vkVoidFunction)CreateCommandPool;
3800 if (!strcmp(name, "DestroyCommandPool"))
3801 return (PFN_vkVoidFunction)DestroyCommandPool;
3802 if (!strcmp(name, "ResetCommandPool"))
3803 return (PFN_vkVoidFunction)ResetCommandPool;
3804 if (!strcmp(name, "AllocateCommandBuffers"))
3805 return (PFN_vkVoidFunction)AllocateCommandBuffers;
3806 if (!strcmp(name, "FreeCommandBuffers"))
3807 return (PFN_vkVoidFunction)FreeCommandBuffers;
3808 if (!strcmp(name, "BeginCommandBuffer"))
3809 return (PFN_vkVoidFunction)BeginCommandBuffer;
3810 if (!strcmp(name, "EndCommandBuffer"))
3811 return (PFN_vkVoidFunction)EndCommandBuffer;
3812 if (!strcmp(name, "ResetCommandBuffer"))
3813 return (PFN_vkVoidFunction)ResetCommandBuffer;
3814 if (!strcmp(name, "CmdBindPipeline"))
3815 return (PFN_vkVoidFunction)CmdBindPipeline;
3816 if (!strcmp(name, "CmdSetViewport"))
3817 return (PFN_vkVoidFunction)CmdSetViewport;
3818 if (!strcmp(name, "CmdSetScissor"))
3819 return (PFN_vkVoidFunction)CmdSetScissor;
3820 if (!strcmp(name, "CmdSetLineWidth"))
3821 return (PFN_vkVoidFunction)CmdSetLineWidth;
3822 if (!strcmp(name, "CmdSetDepthBias"))
3823 return (PFN_vkVoidFunction)CmdSetDepthBias;
3824 if (!strcmp(name, "CmdSetBlendConstants"))
3825 return (PFN_vkVoidFunction)CmdSetBlendConstants;
3826 if (!strcmp(name, "CmdSetDepthBounds"))
3827 return (PFN_vkVoidFunction)CmdSetDepthBounds;
3828 if (!strcmp(name, "CmdSetStencilCompareMask"))
3829 return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
3830 if (!strcmp(name, "CmdSetStencilWriteMask"))
3831 return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
3832 if (!strcmp(name, "CmdSetStencilReference"))
3833 return (PFN_vkVoidFunction)CmdSetStencilReference;
3834 if (!strcmp(name, "CmdBindDescriptorSets"))
3835 return (PFN_vkVoidFunction)CmdBindDescriptorSets;
3836 if (!strcmp(name, "CmdBindIndexBuffer"))
3837 return (PFN_vkVoidFunction)CmdBindIndexBuffer;
3838 if (!strcmp(name, "CmdBindVertexBuffers"))
3839 return (PFN_vkVoidFunction)CmdBindVertexBuffers;
3840 if (!strcmp(name, "CmdDraw"))
3841 return (PFN_vkVoidFunction)CmdDraw;
3842 if (!strcmp(name, "CmdDrawIndexed"))
3843 return (PFN_vkVoidFunction)CmdDrawIndexed;
3844 if (!strcmp(name, "CmdDrawIndirect"))
3845 return (PFN_vkVoidFunction)CmdDrawIndirect;
3846 if (!strcmp(name, "CmdDrawIndexedIndirect"))
3847 return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
3848 if (!strcmp(name, "CmdDispatch"))
3849 return (PFN_vkVoidFunction)CmdDispatch;
3850 if (!strcmp(name, "CmdDispatchIndirect"))
3851 return (PFN_vkVoidFunction)CmdDispatchIndirect;
3852 if (!strcmp(name, "CmdCopyBuffer"))
3853 return (PFN_vkVoidFunction)CmdCopyBuffer;
3854 if (!strcmp(name, "CmdCopyImage"))
3855 return (PFN_vkVoidFunction)CmdCopyImage;
3856 if (!strcmp(name, "CmdBlitImage"))
3857 return (PFN_vkVoidFunction)CmdBlitImage;
3858 if (!strcmp(name, "CmdCopyBufferToImage"))
3859 return (PFN_vkVoidFunction)CmdCopyBufferToImage;
3860 if (!strcmp(name, "CmdCopyImageToBuffer"))
3861 return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
3862 if (!strcmp(name, "CmdUpdateBuffer"))
3863 return (PFN_vkVoidFunction)CmdUpdateBuffer;
3864 if (!strcmp(name, "CmdFillBuffer"))
3865 return (PFN_vkVoidFunction)CmdFillBuffer;
3866 if (!strcmp(name, "CmdClearColorImage"))
3867 return (PFN_vkVoidFunction)CmdClearColorImage;
3868 if (!strcmp(name, "CmdClearDepthStencilImage"))
3869 return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
3870 if (!strcmp(name, "CmdClearAttachments"))
3871 return (PFN_vkVoidFunction)CmdClearAttachments;
3872 if (!strcmp(name, "CmdResolveImage"))
3873 return (PFN_vkVoidFunction)CmdResolveImage;
3874 if (!strcmp(name, "CmdSetEvent"))
3875 return (PFN_vkVoidFunction)CmdSetEvent;
3876 if (!strcmp(name, "CmdResetEvent"))
3877 return (PFN_vkVoidFunction)CmdResetEvent;
3878 if (!strcmp(name, "CmdWaitEvents"))
3879 return (PFN_vkVoidFunction)CmdWaitEvents;
3880 if (!strcmp(name, "CmdPipelineBarrier"))
3881 return (PFN_vkVoidFunction)CmdPipelineBarrier;
3882 if (!strcmp(name, "CmdBeginQuery"))
3883 return (PFN_vkVoidFunction)CmdBeginQuery;
3884 if (!strcmp(name, "CmdEndQuery"))
3885 return (PFN_vkVoidFunction)CmdEndQuery;
3886 if (!strcmp(name, "CmdResetQueryPool"))
3887 return (PFN_vkVoidFunction)CmdResetQueryPool;
3888 if (!strcmp(name, "CmdWriteTimestamp"))
3889 return (PFN_vkVoidFunction)CmdWriteTimestamp;
3890 if (!strcmp(name, "CmdCopyQueryPoolResults"))
3891 return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
3892 if (!strcmp(name, "CmdPushConstants"))
3893 return (PFN_vkVoidFunction)CmdPushConstants;
3894 if (!strcmp(name, "CmdBeginRenderPass"))
3895 return (PFN_vkVoidFunction)CmdBeginRenderPass;
3896 if (!strcmp(name, "CmdNextSubpass"))
3897 return (PFN_vkVoidFunction)CmdNextSubpass;
3898 if (!strcmp(name, "CmdEndRenderPass"))
3899 return (PFN_vkVoidFunction)CmdEndRenderPass;
3900 if (!strcmp(name, "CmdExecuteCommands"))
3901 return (PFN_vkVoidFunction)CmdExecuteCommands;
3902
3903 return NULL;
3904}
3905static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
3906 if (!name || name[0] != 'v' || name[1] != 'k')
3907 return NULL;
3908
3909 name += 2;
3910 if (!strcmp(name, "CreateInstance"))
3911 return (PFN_vkVoidFunction)CreateInstance;
3912 if (!strcmp(name, "DestroyInstance"))
3913 return (PFN_vkVoidFunction)DestroyInstance;
3914 if (!strcmp(name, "EnumeratePhysicalDevices"))
3915 return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
3916 if (!strcmp(name, "GetPhysicalDeviceFeatures"))
3917 return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
3918 if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
3919 return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
3920 if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
3921 return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
3922 if (!strcmp(name, "GetPhysicalDeviceProperties"))
3923 return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
3924 if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
3925 return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
3926 if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
3927 return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
3928 if (!strcmp(name, "GetInstanceProcAddr"))
3929 return (PFN_vkVoidFunction)GetInstanceProcAddr;
3930 if (!strcmp(name, "CreateDevice"))
3931 return (PFN_vkVoidFunction)CreateDevice;
3932 if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
3933 return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
3934 if (!strcmp(name, "EnumerateInstanceLayerProperties"))
3935 return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
3936 if (!strcmp(name, "EnumerateDeviceLayerProperties"))
3937 return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
3938 if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
3939 return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
3940
3941 return NULL;
3942}
3943
3944static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
3945 if (device) {
3946 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
Mark Youngead9b932016-09-08 12:28:38 -06003947
3948 if (device_data->wsi_enabled) {
3949 if (!strcmp("vkCreateSwapchainKHR", name))
3950 return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
3951 if (!strcmp("vkDestroySwapchainKHR", name))
3952 return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
3953 if (!strcmp("vkGetSwapchainImagesKHR", name))
3954 return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
3955 if (!strcmp("vkAcquireNextImageKHR", name))
3956 return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
3957 if (!strcmp("vkQueuePresentKHR", name))
3958 return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
3959 }
3960
3961 if (device_data->wsi_display_swapchain_enabled) {
3962 if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
3963 return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
3964 }
3965 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003966 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003967
3968 return nullptr;
3969}
3970
3971VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
3972 PFN_vkVoidFunction addr;
3973 addr = InterceptCoreDeviceCommand(funcName);
3974 if (addr) {
3975 return addr;
3976 }
3977 assert(device);
3978
3979 addr = InterceptWsiEnabledCommand(funcName, device);
3980 if (addr) {
3981 return addr;
3982 }
3983 if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
3984 return NULL;
3985 }
3986 return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
3987}
3988
// Resolve an instance-level entry point name, checking this layer's intercepted
// commands first and falling through to the next layer's GetInstanceProcAddr
// only when the name is not implemented here.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;
    // Core instance commands, then core device commands (the loader may query
    // device commands through GetInstanceProcAddr as well).
    addr = InterceptCoreInstanceCommand(funcName);
    if (!addr) {
        addr = InterceptCoreDeviceCommand(funcName);
    }
    if (!addr) {
        // NOTE(review): with a VK_NULL_HANDLE device, the visible
        // InterceptWsiEnabledCommand always returns nullptr, so this probe can
        // never match -- presumably dead code; confirm before removing.
        addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
    }
    if (addr) {
        return addr;
    }
    assert(instance);

    // Debug-report (message callback) extension commands need the instance to
    // check whether the extension was enabled.
    addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    // NOTE(review): 'instance' is a VkInstance, but the InterceptWsiEnabledCommand
    // visible above takes a VkDevice -- presumably another overload exists
    // elsewhere in this file; verify, since dispatching a VkInstance through the
    // device path would look up the wrong layer_data.
    addr = InterceptWsiEnabledCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    // Unknown name: chain to the next layer/loader, if it provides an entry point.
    if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
}
4016
4017} // namespace object_tracker
4018
4019// vk_layer_logging.h expects these to be defined
4020VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
4021 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
4022 const VkAllocationCallbacks *pAllocator,
4023 VkDebugReportCallbackEXT *pMsgCallback) {
4024 return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
4025}
4026
4027VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
4028 const VkAllocationCallbacks *pAllocator) {
4029 object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
4030}
4031
4032VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
4033 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
4034 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
4035 object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
4036}
4037
4038// Loader-layer interface v0, just wrappers since there is only a layer
4039VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
4040 VkExtensionProperties *pProperties) {
4041 return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
4042}
4043
4044VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
4045 VkLayerProperties *pProperties) {
4046 return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
4047}
4048
4049VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
4050 VkLayerProperties *pProperties) {
4051 // The layer command handles VK_NULL_HANDLE just fine internally
4052 assert(physicalDevice == VK_NULL_HANDLE);
4053 return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
4054}
4055
4056VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
4057 return object_tracker::GetDeviceProcAddr(dev, funcName);
4058}
4059
4060VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
4061 return object_tracker::GetInstanceProcAddr(instance, funcName);
4062}
4063
4064VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
4065 const char *pLayerName, uint32_t *pCount,
4066 VkExtensionProperties *pProperties) {
4067 // The layer command handles VK_NULL_HANDLE just fine internally
4068 assert(physicalDevice == VK_NULL_HANDLE);
4069 return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
Mark Lobodzinski38080682016-07-22 15:30:27 -06004070}