/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Mike Stroyan <stroyan@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 */

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"

#include <cinttypes>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unordered_map>

#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vulkan/vk_layer.h"

#include "object_tracker.h"

namespace object_tracker {

static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
}

// Add new queue to head of global queue list
static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    auto queueItem = device_data->queue_info_map.find(queue);
    if (queueItem == device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
        if (p_queue_info != NULL) {
            memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
            p_queue_info->queue = queue;
            p_queue_info->queue_node_index = queue_node_index;
            device_data->queue_info_map[queue] = p_queue_info;
        } else {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                    reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
                    "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
        }
    }
}

// Destroy memRef lists and free all memory
static void DestroyQueueDataStructures(VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    for (auto queue_item : device_data->queue_info_map) {
        delete queue_item.second;
    }
    device_data->queue_info_map.clear();

    // Destroy the items in the queue map
    auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
    while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        uint32_t obj_index = queue->second->object_type;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[obj_index] > 0);
        device_data->num_objects[obj_index]--;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
                __LINE__, OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
                queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
        delete queue->second;
        queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
    }
}

// Check Queue type flags for selected queue operations
static void ValidateQueueFlags(VkQueue queue, const char *function) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    auto queue_item = device_data->queue_info_map.find(queue);
    if (queue_item != device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *pQueueInfo = queue_item->second;
        if (pQueueInfo != NULL) {
            layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
            if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
                0) {
                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                        reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
                        "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
            }
        }
    }
}

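// Record a command buffer allocation, remembering its parent command pool and whether it is a secondary command buffer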
static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                                  VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<const uint64_t>(command_buffer), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
    pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
    if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
        pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
    } else {
        pNewObjNode->status = OBJSTATUS_NONE;
    }
    device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

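// Verify that a command buffer is known to the tracker and was allocated from the specified command pool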
static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
    if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
        OBJTRACK_NODE *pNode =
            device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
                                 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
                                 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(command_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove command buffer obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

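// Record a descriptor set allocation, remembering the descriptor pool it was allocated from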
static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
                                  VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
            reinterpret_cast<uint64_t &>(descriptor_set));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
    device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
        pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

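// Verify that a descriptor set is known to the tracker and was allocated from the specified descriptor pool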
static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
    auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
    if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = dsItem->second;

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
                                 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
                                 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(descriptor_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove descriptor set obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

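// Add a queue to the object map (or reuse its existing entry) so it can be validated like any other tracked object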
static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
            OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t>(vkObj));

    OBJTRACK_NODE *p_obj_node = NULL;
    auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
    if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        p_obj_node = new OBJTRACK_NODE;
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
        device_data->num_objects[object_type]++;
        device_data->num_total_objects++;
    } else {
        p_obj_node = queue_item->second;
    }
    p_obj_node->object_type = object_type;
    p_obj_node->status = OBJSTATUS_NONE;
    p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
}

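// Track a swapchain image in its own map, recording the swapchain it belongs to as its parent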
static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
            reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
            reinterpret_cast<uint64_t &>(swapchain_image));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
    device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
}

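// Add a dispatchable (pointer-based) object to the tracking map, noting whether a custom allocator was used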
template <typename T1, typename T2>
static void CreateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                     bool custom_allocator) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t>(object));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t>(object);
    instance_data->object_map[object_type][reinterpret_cast<uint64_t>(object)] = pNewObjNode;
    instance_data->num_objects[object_type]++;
    instance_data->num_total_objects++;
}

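// Add a non-dispatchable (handle-based) object to the tracking map, noting whether a custom allocator was used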
template <typename T1, typename T2>
static void CreateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                        bool custom_allocator) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t &>(object),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t &>(object));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(object);
    device_data->object_map[object_type][reinterpret_cast<uint64_t &>(object)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

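// Remove an object from the tracking map, update the object counts, and warn if the allocator usage at destroy time
// does not match what was recorded at creation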
template <typename T1, typename T2>
static void DestroyObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    uint64_t object_handle = reinterpret_cast<uint64_t &>(object);

    auto item = device_data->object_map[object_type].find(object_handle);
    if (item != device_data->object_map[object_type].end()) {

        OBJTRACK_NODE *pNode = item->second;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[pNode->object_type] > 0);
        device_data->num_objects[pNode->object_type]--;

        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
                OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
                device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);

        auto allocated_with_custom = pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR;
        if (custom_allocator ^ allocated_with_custom) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
                    OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
                    "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
                    (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
                    (allocated_with_custom ? "" : "not "));
        }

        delete pNode;
        device_data->object_map[object_type].erase(item);
    } else {
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
                OBJTRACK_UNKNOWN_OBJECT, LayerName,
                "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
                object_name[object_type], object_handle);
    }
}

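// Report an error if a dispatchable object handle is not present in the tracking map (NULL handles may optionally be allowed)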
template <typename T1, typename T2>
static bool ValidateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                       bool null_allowed) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    if (instance_data->object_map[object_type].find(reinterpret_cast<uint64_t>(object)) ==
        instance_data->object_map[object_type].end()) {
        return log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
                       __LINE__, OBJTRACK_INVALID_OBJECT, LayerName, "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type],
                       reinterpret_cast<uint64_t>(object));
    }
    return false;
}

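// Report an error if a non-dispatchable object handle is not tracked; images are also checked against the swapchain image map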
template <typename T1, typename T2>
static bool ValidateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
                                          bool null_allowed) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    if (device_data->object_map[object_type].find(reinterpret_cast<uint64_t &>(object)) ==
        device_data->object_map[object_type].end()) {
        // If object is an image, also look for it in the swapchain image map
        if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
            (device_data->swapchainImageMap.find(reinterpret_cast<uint64_t &>(object)) == device_data->swapchainImageMap.end())) {
            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
                           reinterpret_cast<uint64_t &>(object), __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
                           "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type], reinterpret_cast<uint64_t &>(object));
        }
    }
    return false;
}

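// Flag as errors (and remove from the map) any objects of the given type that were never destroyed on this device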
static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
        OBJTRACK_NODE *object_info = item->second;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName,
                "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
        item = device_data->object_map[object_type].erase(item);
    }
}

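// Intercept vkDestroyInstance: report leaked devices and their child objects, then tear down layer state for this instance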
VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);

    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);

    // Enable the temporary callback(s) here to catch cleanup issues:
    bool callback_setup = false;
    if (instance_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                        instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);

    DestroyObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
    // Report any remaining objects in LL

    for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
         iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
        OBJTRACK_NODE *pNode = iit->second;

        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);

        log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
        // Semaphore:
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
        // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
        ++iit;  // Advance to the next leaked device; the device map itself is cleared after the loop
    }
    instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();

    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
    pInstanceTable->DestroyInstance(instance, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
    }
    if (instance_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);

    instanceExtMap.erase(pInstanceTable);
    lock.unlock();
    ot_instance_table_map.erase(key);
}

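// Intercept vkDestroyDevice: report objects leaked on this device, clean up queue bookkeeping, and destroy the device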
VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);
    ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    DestroyObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);

    // Report any remaining objects associated with this VkDevice object in LL
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);

    // Clean up Queue's MemRef Linked Lists
    DestroyQueueDataStructures(device);

    lock.unlock();

    dispatch_key key = get_dispatch_key(device);
    VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
    pDisp->DestroyDevice(device, pAllocator);
    ot_device_table_map.erase(key);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                             VkFormatProperties *pFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                      VkImageType type, VkImageTiling tiling,
                                                                      VkImageUsageFlags usage, VkImageCreateFlags flags,
                                                                      VkImageFormatProperties *pImageFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_instance_table_map, physicalDevice)
            ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
                                                                    VkExtensionProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                              VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
        if (pSubmits) {
            for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
                if (pSubmits[idx0].pCommandBuffers) {
                    for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
                        skip_call |= ValidateDispatchableObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
                                                                VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
                    }
                }
                if (pSubmits[idx0].pSignalSemaphores) {
                    for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
                        skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
                if (pSubmits[idx0].pWaitSemaphores) {
                    for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
                        skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
            }
        }
        if (queue) {
            skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                       const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                            const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
                                                               VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                                     VkDeviceSize *pCommittedMemoryInBytes) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}

VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
                                                VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                       VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
                                                            VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                        VkImageType type, VkSampleCountFlagBits samples,
                                                                        VkImageUsageFlags usage, VkImageTiling tiling,
                                                                        uint32_t *pPropertyCount,
                                                                        VkSparseImageFormatProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
                                                       pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
                                             uint64_t timeout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateNonDispatchableObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
    return result;
}

1019VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1020 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
1021 bool skip_call = false;
1022 {
1023 std::lock_guard<std::mutex> lock(global_lock);
1024 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1025 }
1026 if (skip_call) {
1027 return VK_ERROR_VALIDATION_FAILED_EXT;
1028 }
1029 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
1030 {
1031 std::lock_guard<std::mutex> lock(global_lock);
1032 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001033 CreateNonDispatchableObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001034 }
1035 }
1036 return result;
1037}
1038
1039VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
1040 bool skip_call = false;
1041 {
1042 std::lock_guard<std::mutex> lock(global_lock);
1043 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1044 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1045 }
1046 if (skip_call) {
1047 return;
1048 }
1049 {
1050 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001051 DestroyObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001052 }
1053 get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
1054}
1055
1056VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
1057 size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1058 bool skip_call = false;
1059 {
1060 std::lock_guard<std::mutex> lock(global_lock);
1061 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1062 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1063 }
1064 if (skip_call) {
1065 return VK_ERROR_VALIDATION_FAILED_EXT;
1066 }
1067 VkResult result = get_dispatch_table(ot_device_table_map, device)
1068 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
1069 return result;
1070}
1071
1072VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
1073 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
1074 bool skip_call = false;
1075 {
1076 std::lock_guard<std::mutex> lock(global_lock);
1077 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1078 }
1079 if (skip_call) {
1080 return VK_ERROR_VALIDATION_FAILED_EXT;
1081 }
1082 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1083 {
1084 std::lock_guard<std::mutex> lock(global_lock);
1085 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001086 CreateNonDispatchableObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001087 }
1088 }
1089 return result;
1090}
1091
1092VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
1093 bool skip_call = false;
1094 {
1095 std::lock_guard<std::mutex> lock(global_lock);
1096 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1097 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1098 }
1099 if (skip_call) {
1100 return;
1101 }
1102 {
1103 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001104 DestroyObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001105 }
1106 get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
1107}
1108
1109VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
1110 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
1111 bool skip_call = false;
1112 {
1113 std::lock_guard<std::mutex> lock(global_lock);
1114 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1115 if (pCreateInfo) {
1116 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1117 }
1118 }
1119 if (skip_call) {
1120 return VK_ERROR_VALIDATION_FAILED_EXT;
1121 }
1122 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
1123 {
1124 std::lock_guard<std::mutex> lock(global_lock);
1125 if (result == VK_SUCCESS) {
Chris Forbesdbfe96a2016-09-29 13:51:10 +13001126 CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001127 }
1128 }
1129 return result;
1130}
1131
1132VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
1133 bool skip_call = false;
1134 {
1135 std::lock_guard<std::mutex> lock(global_lock);
1136 skip_call |= ValidateNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
1137 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1138 }
1139 if (skip_call) {
1140 return;
1141 }
1142 {
1143 std::lock_guard<std::mutex> lock(global_lock);
1144        DestroyObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
1145    }
1146 get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
1147}
1148
1149VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1150 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1151 bool skip_call = false;
1152 {
1153 std::lock_guard<std::mutex> lock(global_lock);
1154 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1155 }
1156 if (skip_call) {
1157 return VK_ERROR_VALIDATION_FAILED_EXT;
1158 }
1159 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
1160 {
1161 std::lock_guard<std::mutex> lock(global_lock);
1162 if (result == VK_SUCCESS) {
1163            CreateNonDispatchableObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
1164        }
1165 }
1166 return result;
1167}
1168
1169VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1170 bool skip_call = false;
1171 {
1172 std::lock_guard<std::mutex> lock(global_lock);
1173 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1174 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1175 }
1176 if (skip_call) {
1177 return;
1178 }
1179 {
1180 std::lock_guard<std::mutex> lock(global_lock);
1181        DestroyObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
1182    }
1183 get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
1184}
1185
1186VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
1187 VkSubresourceLayout *pLayout) {
1188 bool skip_call = false;
1189 {
1190 std::lock_guard<std::mutex> lock(global_lock);
1191 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1192 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1193 }
1194 if (skip_call) {
1195 return;
1196 }
1197 get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1198}
1199
1200VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1201 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1202 bool skip_call = false;
1203 {
1204 std::lock_guard<std::mutex> lock(global_lock);
1205 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1206 if (pCreateInfo) {
1207 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1208 }
1209 }
1210 if (skip_call) {
1211 return VK_ERROR_VALIDATION_FAILED_EXT;
1212 }
1213 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
1214 {
1215 std::lock_guard<std::mutex> lock(global_lock);
1216 if (result == VK_SUCCESS) {
1217            CreateNonDispatchableObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
1218        }
1219 }
1220 return result;
1221}
1222
1223VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
1224 bool skip_call = false;
1225 {
1226 std::lock_guard<std::mutex> lock(global_lock);
1227 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1228 skip_call |= ValidateNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1229 }
1230 if (skip_call) {
1231 return;
1232 }
1233 {
1234 std::lock_guard<std::mutex> lock(global_lock);
1235        DestroyObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
1236    }
1237 get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
1238}
1239
1240VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1241 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1242 bool skip_call = false;
1243 {
1244 std::lock_guard<std::mutex> lock(global_lock);
1245 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1246 }
1247 if (skip_call) {
1248 return VK_ERROR_VALIDATION_FAILED_EXT;
1249 }
1250 VkResult result =
1251 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1252 {
1253 std::lock_guard<std::mutex> lock(global_lock);
1254 if (result == VK_SUCCESS) {
1255            CreateNonDispatchableObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
1256        }
1257 }
1258 return result;
1259}
1260
1261VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1262 const VkAllocationCallbacks *pAllocator) {
1263 bool skip_call = false;
1264 {
1265 std::lock_guard<std::mutex> lock(global_lock);
1266 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1267 skip_call |= ValidateNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
1268 }
1269 if (skip_call) {
1270 return;
1271 }
1272 {
1273 std::lock_guard<std::mutex> lock(global_lock);
1274        DestroyObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
1275    }
1276 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1277}
1278
1279VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1280 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1281 bool skip_call = false;
1282 {
1283 std::lock_guard<std::mutex> lock(global_lock);
1284 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1285 }
1286 if (skip_call) {
1287 return VK_ERROR_VALIDATION_FAILED_EXT;
1288 }
1289 VkResult result =
1290 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1291 {
1292 std::lock_guard<std::mutex> lock(global_lock);
1293 if (result == VK_SUCCESS) {
1294            CreateNonDispatchableObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
1295        }
1296 }
1297 return result;
1298}
1299
1300VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1301 const VkAllocationCallbacks *pAllocator) {
1302 bool skip_call = false;
1303 {
1304 std::lock_guard<std::mutex> lock(global_lock);
1305 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1306 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1307 }
1308 if (skip_call) {
1309 return;
1310 }
1311 {
1312 std::lock_guard<std::mutex> lock(global_lock);
1313        DestroyObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
1314    }
1315 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1316}
1317
1318VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1319 void *pData) {
1320 bool skip_call = false;
1321 {
1322 std::lock_guard<std::mutex> lock(global_lock);
1323 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1324 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1325 }
1326 if (skip_call) {
1327 return VK_ERROR_VALIDATION_FAILED_EXT;
1328 }
1329 VkResult result =
1330 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1331 return result;
1332}
1333
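// Merging caches checks the destination cache plus every source cache handle in pSrcCaches.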
1334VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1335 const VkPipelineCache *pSrcCaches) {
1336 bool skip_call = false;
1337 {
1338 std::lock_guard<std::mutex> lock(global_lock);
1339 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1340 skip_call |= ValidateNonDispatchableObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1341 if (pSrcCaches) {
1342 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1343 skip_call |=
1344 ValidateNonDispatchableObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1345 }
1346 }
1347 }
1348 if (skip_call) {
1349 return VK_ERROR_VALIDATION_FAILED_EXT;
1350 }
1351 VkResult result =
1352 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1353 return result;
1354}
1355
1356VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1357 bool skip_call = false;
1358 {
1359 std::lock_guard<std::mutex> lock(global_lock);
1360 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1361 skip_call |= ValidateNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1362 }
1363 if (skip_call) {
1364 return;
1365 }
1366 {
1367 std::lock_guard<std::mutex> lock(global_lock);
1368        DestroyObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
1369    }
1370 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1371}
1372
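// Pipeline layout creation also validates each descriptor set layout listed in pCreateInfo->pSetLayouts.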
1373VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1374 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1375 bool skip_call = false;
1376 {
1377 std::lock_guard<std::mutex> lock(global_lock);
1378 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1379 if (pCreateInfo) {
1380 if (pCreateInfo->pSetLayouts) {
1381 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
1382 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pSetLayouts[idx0],
1383 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1384 }
1385 }
1386 }
1387 }
1388 if (skip_call) {
1389 return VK_ERROR_VALIDATION_FAILED_EXT;
1390 }
1391 VkResult result =
1392 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1393 {
1394 std::lock_guard<std::mutex> lock(global_lock);
1395 if (result == VK_SUCCESS) {
1396            CreateNonDispatchableObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
1397        }
1398 }
1399 return result;
1400}
1401
1402VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1403 const VkAllocationCallbacks *pAllocator) {
1404 bool skip_call = false;
1405 {
1406 std::lock_guard<std::mutex> lock(global_lock);
1407 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1408 skip_call |= ValidateNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1409 }
1410 if (skip_call) {
1411 return;
1412 }
1413 {
1414 std::lock_guard<std::mutex> lock(global_lock);
1415        DestroyObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
1416    }
1417 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1418}
1419
1420VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1421 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1422 bool skip_call = false;
1423 {
1424 std::lock_guard<std::mutex> lock(global_lock);
1425 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1426 }
1427 if (skip_call) {
1428 return VK_ERROR_VALIDATION_FAILED_EXT;
1429 }
1430 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1431 {
1432 std::lock_guard<std::mutex> lock(global_lock);
1433 if (result == VK_SUCCESS) {
1434            CreateNonDispatchableObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
1435        }
1436 }
1437 return result;
1438}
1439
1440VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1441 bool skip_call = false;
1442 {
1443 std::lock_guard<std::mutex> lock(global_lock);
1444 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1445 skip_call |= ValidateNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1446 }
1447 if (skip_call) {
1448 return;
1449 }
1450 {
1451 std::lock_guard<std::mutex> lock(global_lock);
1452        DestroyObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
1453    }
1454 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1455}
1456
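// Descriptor set layout creation walks pCreateInfo->pBindings and validates every immutable
// sampler handle attached to each binding.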
1457VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1458 const VkAllocationCallbacks *pAllocator,
1459 VkDescriptorSetLayout *pSetLayout) {
1460 bool skip_call = false;
1461 {
1462 std::lock_guard<std::mutex> lock(global_lock);
1463 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1464 if (pCreateInfo) {
1465 if (pCreateInfo->pBindings) {
1466 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1467 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1468 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1469 skip_call |=
1470 ValidateNonDispatchableObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
1471 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1472 }
1473 }
1474 }
1475 }
1476 }
1477 }
1478 if (skip_call) {
1479 return VK_ERROR_VALIDATION_FAILED_EXT;
1480 }
1481 VkResult result =
1482 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1483 {
1484 std::lock_guard<std::mutex> lock(global_lock);
1485 if (result == VK_SUCCESS) {
1486            CreateNonDispatchableObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
1487        }
1488 }
1489 return result;
1490}
1491
1492VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1493 const VkAllocationCallbacks *pAllocator) {
1494 bool skip_call = false;
1495 {
1496 std::lock_guard<std::mutex> lock(global_lock);
1497 skip_call |= ValidateNonDispatchableObject(device, descriptorSetLayout,
1498 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1499 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1500 }
1501 if (skip_call) {
1502 return;
1503 }
1504 {
1505 std::lock_guard<std::mutex> lock(global_lock);
1506        DestroyObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
1507    }
1508 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1509}
1510
1511VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1512 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1513 bool skip_call = false;
1514 {
1515 std::lock_guard<std::mutex> lock(global_lock);
1516 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1517 }
1518 if (skip_call) {
1519 return VK_ERROR_VALIDATION_FAILED_EXT;
1520 }
1521 VkResult result =
1522 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1523 {
1524 std::lock_guard<std::mutex> lock(global_lock);
1525 if (result == VK_SUCCESS) {
1526            CreateNonDispatchableObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
1527        }
1528 }
1529 return result;
1530}
1531
1532VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1533 VkDescriptorPoolResetFlags flags) {
1534 bool skip_call = false;
1535 {
1536 std::lock_guard<std::mutex> lock(global_lock);
1537 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1538 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1539 }
1540 if (skip_call) {
1541 return VK_ERROR_VALIDATION_FAILED_EXT;
1542 }
1543 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1544 return result;
1545}
1546
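// UpdateDescriptorSets validates the handles embedded in each write according to its descriptorType:
// pBufferInfo[].buffer for the uniform/storage buffer types, pImageInfo[].imageView and .sampler for
// the sampler/image types, and pTexelBufferView[] for the texel buffer types. The texel buffer check
// passes true as its final argument, unlike the other call sites, presumably to tolerate a null view.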
1547VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1548 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
1549 const VkCopyDescriptorSet *pDescriptorCopies) {
1550 bool skip_call = false;
1551 {
1552 std::lock_guard<std::mutex> lock(global_lock);
1553 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1554 if (pDescriptorCopies) {
1555 for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
1556 if (pDescriptorCopies[idx0].dstSet) {
1557 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].dstSet,
1558 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1559 }
1560 if (pDescriptorCopies[idx0].srcSet) {
1561 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].srcSet,
1562 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1563 }
1564 }
1565 }
1566 if (pDescriptorWrites) {
1567 for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
1568 if (pDescriptorWrites[idx1].dstSet) {
1569 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].dstSet,
1570 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1571 }
1572 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1573 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
1574 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
1575 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
1576 for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
1577 if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
1578 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
1579 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1580 }
1581 }
1582 }
1583 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
1584 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
1585 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
1586 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
1587 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
1588 for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
1589 if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
1590 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
1591 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1592 }
1593 if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
1594 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
1595 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1596 }
1597 }
1598 }
1599 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1600 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
1601 for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
1602 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
1603 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
1604 }
1605 }
1606 }
1607 }
1608 }
1609 if (skip_call) {
1610 return;
1611 }
1612 get_dispatch_table(ot_device_table_map, device)
1613 ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
1614}
1615
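// Framebuffer creation validates every image view in pCreateInfo->pAttachments as well as the
// render pass the framebuffer is built against.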
1616VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1617 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1618 bool skip_call = false;
1619 {
1620 std::lock_guard<std::mutex> lock(global_lock);
1621 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1622 if (pCreateInfo) {
1623 if (pCreateInfo->pAttachments) {
1624 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
1625 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pAttachments[idx0],
1626 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1627 }
1628 }
1629 if (pCreateInfo->renderPass) {
1630 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->renderPass,
1631 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1632 }
1633 }
1634 }
1635 if (skip_call) {
1636 return VK_ERROR_VALIDATION_FAILED_EXT;
1637 }
1638 VkResult result =
1639 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1640 {
1641 std::lock_guard<std::mutex> lock(global_lock);
1642 if (result == VK_SUCCESS) {
1643            CreateNonDispatchableObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
1644        }
1645 }
1646 return result;
1647}
1648
1649VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1650 bool skip_call = false;
1651 {
1652 std::lock_guard<std::mutex> lock(global_lock);
1653 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1654 skip_call |= ValidateNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
1655 }
1656 if (skip_call) {
1657 return;
1658 }
1659 {
1660 std::lock_guard<std::mutex> lock(global_lock);
1661        DestroyObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
1662    }
1663 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1664}
1665
1666VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1667 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1668 bool skip_call = false;
1669 {
1670 std::lock_guard<std::mutex> lock(global_lock);
1671 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1672 }
1673 if (skip_call) {
1674 return VK_ERROR_VALIDATION_FAILED_EXT;
1675 }
1676 VkResult result =
1677 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1678 {
1679 std::lock_guard<std::mutex> lock(global_lock);
1680 if (result == VK_SUCCESS) {
1681            CreateNonDispatchableObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
1682        }
1683 }
1684 return result;
1685}
1686
1687VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1688 bool skip_call = false;
1689 {
1690 std::lock_guard<std::mutex> lock(global_lock);
1691 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1692 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1693 }
1694 if (skip_call) {
1695 return;
1696 }
1697 {
1698 std::lock_guard<std::mutex> lock(global_lock);
1699        DestroyObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
1700    }
1701 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1702}
1703
1704VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1705 bool skip_call = false;
1706 {
1707 std::lock_guard<std::mutex> lock(global_lock);
1708 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1709 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1710 }
1711 if (skip_call) {
1712 return;
1713 }
1714 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1715}
1716
1717VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1718 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1719 bool skip_call = false;
1720 {
1721 std::lock_guard<std::mutex> lock(global_lock);
1722 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1723 }
1724 if (skip_call) {
1725 return VK_ERROR_VALIDATION_FAILED_EXT;
1726 }
1727 VkResult result =
1728 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1729 {
1730 std::lock_guard<std::mutex> lock(global_lock);
1731 if (result == VK_SUCCESS) {
1732            CreateNonDispatchableObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
1733        }
1734 }
1735 return result;
1736}
1737
1738VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1739 bool skip_call = false;
1740 {
1741 std::lock_guard<std::mutex> lock(global_lock);
1742 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1743 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1744 }
1745 if (skip_call) {
1746 return VK_ERROR_VALIDATION_FAILED_EXT;
1747 }
1748 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1749 return result;
1750}
1751
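// BeginCommandBuffer additionally validates the framebuffer and render pass named in
// pInheritanceInfo, but only for command buffers tracked as secondary
// (OBJSTATUS_COMMAND_BUFFER_SECONDARY); both handles are optional there, so null is accepted.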
1752VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1753 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1754 bool skip_call = false;
1755 {
1756 std::lock_guard<std::mutex> lock(global_lock);
1757 skip_call |=
1758 ValidateDispatchableObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1759 if (begin_info) {
1760 OBJTRACK_NODE *pNode =
1761 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<const uint64_t>(command_buffer)];
1762 if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
1763 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
1764 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
1765 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
1766 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1767 }
1768 }
1769 }
1770 if (skip_call) {
1771 return VK_ERROR_VALIDATION_FAILED_EXT;
1772 }
1773 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1774 return result;
1775}
1776
1777VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1778 bool skip_call = false;
1779 {
1780 std::lock_guard<std::mutex> lock(global_lock);
1781 skip_call |=
1782 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1783 }
1784 if (skip_call) {
1785 return VK_ERROR_VALIDATION_FAILED_EXT;
1786 }
1787 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1788 return result;
1789}
1790
1791VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1792 bool skip_call = false;
1793 {
1794 std::lock_guard<std::mutex> lock(global_lock);
1795 skip_call |=
1796 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1797 }
1798 if (skip_call) {
1799 return VK_ERROR_VALIDATION_FAILED_EXT;
1800 }
1801 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1802 return result;
1803}
1804
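// The vkCmd* wrappers below are validation-only: they check the recording command buffer plus any
// Vulkan handles passed in (pipelines, buffers, images, descriptor sets, query pools, events) and
// then forward the call; recording a command never creates a new tracked object.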
1805VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1806 VkPipeline pipeline) {
1807 bool skip_call = false;
1808 {
1809 std::lock_guard<std::mutex> lock(global_lock);
1810 skip_call |=
1811 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1812 skip_call |= ValidateNonDispatchableObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1813 }
1814 if (skip_call) {
1815 return;
1816 }
1817 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1818}
1819
1820VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1821 const VkViewport *pViewports) {
1822 bool skip_call = false;
1823 {
1824 std::lock_guard<std::mutex> lock(global_lock);
1825 skip_call |=
1826 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1827 }
1828 if (skip_call) {
1829 return;
1830 }
1831 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1832}
1833
1834VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1835 const VkRect2D *pScissors) {
1836 bool skip_call = false;
1837 {
1838 std::lock_guard<std::mutex> lock(global_lock);
1839 skip_call |=
1840 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1841 }
1842 if (skip_call) {
1843 return;
1844 }
1845 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1846}
1847
1848VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1849 bool skip_call = false;
1850 {
1851 std::lock_guard<std::mutex> lock(global_lock);
1852 skip_call |=
1853 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1854 }
1855 if (skip_call) {
1856 return;
1857 }
1858 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1859}
1860
1861VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1862 float depthBiasSlopeFactor) {
1863 bool skip_call = false;
1864 {
1865 std::lock_guard<std::mutex> lock(global_lock);
1866 skip_call |=
1867 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1868 }
1869 if (skip_call) {
1870 return;
1871 }
1872 get_dispatch_table(ot_device_table_map, commandBuffer)
1873 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1874}
1875
1876VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1877 bool skip_call = false;
1878 {
1879 std::lock_guard<std::mutex> lock(global_lock);
1880 skip_call |=
1881 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1882 }
1883 if (skip_call) {
1884 return;
1885 }
1886 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1887}
1888
1889VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1890 bool skip_call = false;
1891 {
1892 std::lock_guard<std::mutex> lock(global_lock);
1893 skip_call |=
1894 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1895 }
1896 if (skip_call) {
1897 return;
1898 }
1899 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1900}
1901
1902VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1903 uint32_t compareMask) {
1904 bool skip_call = false;
1905 {
1906 std::lock_guard<std::mutex> lock(global_lock);
1907 skip_call |=
1908 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1909 }
1910 if (skip_call) {
1911 return;
1912 }
1913 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1914}
1915
1916VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1917 bool skip_call = false;
1918 {
1919 std::lock_guard<std::mutex> lock(global_lock);
1920 skip_call |=
1921 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1922 }
1923 if (skip_call) {
1924 return;
1925 }
1926 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1927}
1928
1929VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1930 bool skip_call = false;
1931 {
1932 std::lock_guard<std::mutex> lock(global_lock);
1933 skip_call |=
1934 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1935 }
1936 if (skip_call) {
1937 return;
1938 }
1939 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1940}
1941
1942VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1943 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1944 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1945 const uint32_t *pDynamicOffsets) {
1946 bool skip_call = false;
1947 {
1948 std::lock_guard<std::mutex> lock(global_lock);
1949 skip_call |=
1950 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1951 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1952 if (pDescriptorSets) {
1953 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
1954 skip_call |= ValidateNonDispatchableObject(commandBuffer, pDescriptorSets[idx0],
1955 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1956 }
1957 }
1958 }
1959 if (skip_call) {
1960 return;
1961 }
1962 get_dispatch_table(ot_device_table_map, commandBuffer)
1963 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
1964 dynamicOffsetCount, pDynamicOffsets);
1965}
1966
1967VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1968 VkIndexType indexType) {
1969 bool skip_call = false;
1970 {
1971 std::lock_guard<std::mutex> lock(global_lock);
1972 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1973 skip_call |=
1974 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1975 }
1976 if (skip_call) {
1977 return;
1978 }
1979 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
1980}
1981
1982VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
1983 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
1984 bool skip_call = false;
1985 {
1986 std::lock_guard<std::mutex> lock(global_lock);
1987 skip_call |=
1988 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1989 if (pBuffers) {
1990 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
1991 skip_call |=
1992 ValidateNonDispatchableObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1993 }
1994 }
1995 }
1996 if (skip_call) {
1997 return;
1998 }
1999 get_dispatch_table(ot_device_table_map, commandBuffer)
2000 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
2001}
2002
2003VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
2004 uint32_t firstVertex, uint32_t firstInstance) {
2005 bool skip_call = false;
2006 {
2007 std::lock_guard<std::mutex> lock(global_lock);
2008 skip_call |=
2009 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2010 }
2011 if (skip_call) {
2012 return;
2013 }
2014 get_dispatch_table(ot_device_table_map, commandBuffer)
2015 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2016}
2017
2018VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2019 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2020 bool skip_call = false;
2021 {
2022 std::lock_guard<std::mutex> lock(global_lock);
2023 skip_call |=
2024 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2025 }
2026 if (skip_call) {
2027 return;
2028 }
2029 get_dispatch_table(ot_device_table_map, commandBuffer)
2030 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2031}
2032
2033VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2034 uint32_t stride) {
2035 bool skip_call = false;
2036 {
2037 std::lock_guard<std::mutex> lock(global_lock);
2038 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2039 skip_call |=
2040 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2041 }
2042 if (skip_call) {
2043 return;
2044 }
2045 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2046}
2047
2048VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2049 uint32_t drawCount, uint32_t stride) {
2050 bool skip_call = false;
2051 {
2052 std::lock_guard<std::mutex> lock(global_lock);
2053 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2054 skip_call |=
2055 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2056 }
2057 if (skip_call) {
2058 return;
2059 }
2060 get_dispatch_table(ot_device_table_map, commandBuffer)
2061 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2062}
2063
2064VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2065 bool skip_call = false;
2066 {
2067 std::lock_guard<std::mutex> lock(global_lock);
2068 skip_call |=
2069 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2070 }
2071 if (skip_call) {
2072 return;
2073 }
2074 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2075}
2076
2077VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2078 bool skip_call = false;
2079 {
2080 std::lock_guard<std::mutex> lock(global_lock);
2081 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2082 skip_call |=
2083 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2084 }
2085 if (skip_call) {
2086 return;
2087 }
2088 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2089}
2090
2091VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2092 uint32_t regionCount, const VkBufferCopy *pRegions) {
2093 bool skip_call = false;
2094 {
2095 std::lock_guard<std::mutex> lock(global_lock);
2096 skip_call |=
2097 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2098 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2099 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2100 }
2101 if (skip_call) {
2102 return;
2103 }
2104 get_dispatch_table(ot_device_table_map, commandBuffer)
2105 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2106}
2107
2108VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2109 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2110 const VkImageCopy *pRegions) {
2111 bool skip_call = false;
2112 {
2113 std::lock_guard<std::mutex> lock(global_lock);
2114 skip_call |=
2115 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2116 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2117 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2118 }
2119 if (skip_call) {
2120 return;
2121 }
2122 get_dispatch_table(ot_device_table_map, commandBuffer)
2123 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2124}
2125
2126VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2127 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2128 const VkImageBlit *pRegions, VkFilter filter) {
2129 bool skip_call = false;
2130 {
2131 std::lock_guard<std::mutex> lock(global_lock);
2132 skip_call |=
2133 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2134 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2135 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2136 }
2137 if (skip_call) {
2138 return;
2139 }
2140 get_dispatch_table(ot_device_table_map, commandBuffer)
2141 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2142}
2143
2144VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2145 VkImageLayout dstImageLayout, uint32_t regionCount,
2146 const VkBufferImageCopy *pRegions) {
2147 bool skip_call = false;
2148 {
2149 std::lock_guard<std::mutex> lock(global_lock);
2150 skip_call |=
2151 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2152 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2153 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2154 }
2155 if (skip_call) {
2156 return;
2157 }
2158 get_dispatch_table(ot_device_table_map, commandBuffer)
2159 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2160}
2161
2162VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2163 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2164 bool skip_call = false;
2165 {
2166 std::lock_guard<std::mutex> lock(global_lock);
2167 skip_call |=
2168 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2169 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2170 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2171 }
2172 if (skip_call) {
2173 return;
2174 }
2175 get_dispatch_table(ot_device_table_map, commandBuffer)
2176 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2177}
2178
2179VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2180 VkDeviceSize dataSize, const uint32_t *pData) {
2181 bool skip_call = false;
2182 {
2183 std::lock_guard<std::mutex> lock(global_lock);
2184 skip_call |=
2185 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2186 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2187 }
2188 if (skip_call) {
2189 return;
2190 }
2191 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2192}
2193
2194VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2195 VkDeviceSize size, uint32_t data) {
2196 bool skip_call = false;
2197 {
2198 std::lock_guard<std::mutex> lock(global_lock);
2199 skip_call |=
2200 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2201 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2202 }
2203 if (skip_call) {
2204 return;
2205 }
2206 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2207}
2208
2209VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2210 const VkClearColorValue *pColor, uint32_t rangeCount,
2211 const VkImageSubresourceRange *pRanges) {
2212 bool skip_call = false;
2213 {
2214 std::lock_guard<std::mutex> lock(global_lock);
2215 skip_call |=
2216 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2217 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2218 }
2219 if (skip_call) {
2220 return;
2221 }
2222 get_dispatch_table(ot_device_table_map, commandBuffer)
2223 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2224}
2225
2226VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2227 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2228 const VkImageSubresourceRange *pRanges) {
2229 bool skip_call = false;
2230 {
2231 std::lock_guard<std::mutex> lock(global_lock);
2232 skip_call |=
2233 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2234 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2235 }
2236 if (skip_call) {
2237 return;
2238 }
2239 get_dispatch_table(ot_device_table_map, commandBuffer)
2240 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2241}
2242
2243VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2244 const VkClearAttachment *pAttachments, uint32_t rectCount,
2245 const VkClearRect *pRects) {
2246 bool skip_call = false;
2247 {
2248 std::lock_guard<std::mutex> lock(global_lock);
2249 skip_call |=
2250 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2251 }
2252 if (skip_call) {
2253 return;
2254 }
2255 get_dispatch_table(ot_device_table_map, commandBuffer)
2256 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2257}
2258
2259VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2260 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2261 const VkImageResolve *pRegions) {
2262 bool skip_call = false;
2263 {
2264 std::lock_guard<std::mutex> lock(global_lock);
2265 skip_call |=
2266 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2267 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2268 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2269 }
2270 if (skip_call) {
2271 return;
2272 }
2273 get_dispatch_table(ot_device_table_map, commandBuffer)
2274 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2275}
2276
2277VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2278 bool skip_call = false;
2279 {
2280 std::lock_guard<std::mutex> lock(global_lock);
2281 skip_call |=
2282 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2283 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2284 }
2285 if (skip_call) {
2286 return;
2287 }
2288 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2289}
2290
2291VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2292 bool skip_call = false;
2293 {
2294 std::lock_guard<std::mutex> lock(global_lock);
2295 skip_call |=
2296 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2297 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2298 }
2299 if (skip_call) {
2300 return;
2301 }
2302 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2303}
2304
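// CmdWaitEvents and CmdPipelineBarrier also validate the handles carried inside the barrier
// structures: every VkBufferMemoryBarrier::buffer, every VkImageMemoryBarrier::image, and (for
// CmdWaitEvents) each event in pEvents.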
2305VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2306 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2307 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2308 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2309 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2310 bool skip_call = false;
2311 {
2312 std::lock_guard<std::mutex> lock(global_lock);
2313 skip_call |=
2314 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2315 if (pBufferMemoryBarriers) {
2316 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2317 if (pBufferMemoryBarriers[idx0].buffer) {
2318 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2319 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2320 }
2321 }
2322 }
2323 if (pEvents) {
2324 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2325 skip_call |=
2326 ValidateNonDispatchableObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2327 }
2328 }
2329 if (pImageMemoryBarriers) {
2330 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2331 if (pImageMemoryBarriers[idx2].image) {
2332 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2333 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2334 }
2335 }
2336 }
2337 }
2338 if (skip_call) {
2339 return;
2340 }
2341 get_dispatch_table(ot_device_table_map, commandBuffer)
2342 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2343 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2344}
2345
2346VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2347 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2348 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2349 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2350 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2351 bool skip_call = false;
2352 {
2353 std::lock_guard<std::mutex> lock(global_lock);
2354 skip_call |=
2355 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2356 if (pBufferMemoryBarriers) {
2357 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2358 if (pBufferMemoryBarriers[idx0].buffer) {
2359 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2360 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2361 }
2362 }
2363 }
2364 if (pImageMemoryBarriers) {
2365 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2366 if (pImageMemoryBarriers[idx1].image) {
2367 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2368 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2369 }
2370 }
2371 }
2372 }
2373 if (skip_call) {
2374 return;
2375 }
2376 get_dispatch_table(ot_device_table_map, commandBuffer)
2377 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2378 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2379}
2380
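// The query commands that follow (CmdBeginQuery through CmdCopyQueryPoolResults) all validate the
// same two handles, the recording command buffer and the VkQueryPool, before dispatching;
// CmdCopyQueryPoolResults additionally checks the destination VkBuffer.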
2381VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2382 VkQueryControlFlags flags) {
2383 bool skip_call = false;
2384 {
2385 std::lock_guard<std::mutex> lock(global_lock);
2386 skip_call |=
2387 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2388 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2389 }
2390 if (skip_call) {
2391 return;
2392 }
2393 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2394}
2395
2396VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2397 bool skip_call = false;
2398 {
2399 std::lock_guard<std::mutex> lock(global_lock);
2400 skip_call |=
2401 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2402 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2403 }
2404 if (skip_call) {
2405 return;
2406 }
2407 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2408}
2409
2410VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2411 uint32_t queryCount) {
2412 bool skip_call = false;
2413 {
2414 std::lock_guard<std::mutex> lock(global_lock);
2415 skip_call |=
2416 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2417 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2418 }
2419 if (skip_call) {
2420 return;
2421 }
2422 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2423}
2424
2425VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2426 VkQueryPool queryPool, uint32_t query) {
2427 bool skip_call = false;
2428 {
2429 std::lock_guard<std::mutex> lock(global_lock);
2430 skip_call |=
2431 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2432 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2433 }
2434 if (skip_call) {
2435 return;
2436 }
2437 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2438}
2439
2440VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2441 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2442 VkDeviceSize stride, VkQueryResultFlags flags) {
2443 bool skip_call = false;
2444 {
2445 std::lock_guard<std::mutex> lock(global_lock);
2446 skip_call |=
2447 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2448 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2449 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2450 }
2451 if (skip_call) {
2452 return;
2453 }
2454 get_dispatch_table(ot_device_table_map, commandBuffer)
2455 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2456}
2457
2458VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2459 uint32_t offset, uint32_t size, const void *pValues) {
2460 bool skip_call = false;
2461 {
2462 std::lock_guard<std::mutex> lock(global_lock);
2463 skip_call |=
2464 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2465 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
2466 }
2467 if (skip_call) {
2468 return;
2469 }
2470 get_dispatch_table(ot_device_table_map, commandBuffer)
2471 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2472}
2473
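// CmdBeginRenderPass dereferences pRenderPassBegin (when non-null) to validate both the framebuffer
// and the renderPass handles it carries; CmdNextSubpass and CmdEndRenderPass only need the command
// buffer itself to be valid.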
2474VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2475 VkSubpassContents contents) {
2476 bool skip_call = false;
2477 {
2478 std::lock_guard<std::mutex> lock(global_lock);
2479 skip_call |=
2480 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2481 if (pRenderPassBegin) {
2482 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->framebuffer,
2483 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2484 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->renderPass,
2485 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2486 }
2487 }
2488 if (skip_call) {
2489 return;
2490 }
2491 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2492}
2493
2494VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2495 bool skip_call = false;
2496 {
2497 std::lock_guard<std::mutex> lock(global_lock);
2498 skip_call |=
2499 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2500 }
2501 if (skip_call) {
2502 return;
2503 }
2504 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2505}
2506
2507VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2508 bool skip_call = false;
2509 {
2510 std::lock_guard<std::mutex> lock(global_lock);
2511 skip_call |=
2512 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2513 }
2514 if (skip_call) {
2515 return;
2516 }
2517 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2518}
2519
2520VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2521 const VkCommandBuffer *pCommandBuffers) {
2522 bool skip_call = false;
2523 {
2524 std::lock_guard<std::mutex> lock(global_lock);
2525 skip_call |=
2526 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2527 if (pCommandBuffers) {
2528 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2529 skip_call |= ValidateDispatchableObject(commandBuffer, pCommandBuffers[idx0],
2530 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2531 }
2532 }
2533 }
2534 if (skip_call) {
2535 return;
2536 }
2537 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2538}
2539
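// The hooks from here down cover WSI entry points. DestroySurfaceKHR validates the instance and the
// surface, removes the surface from the tracker via DestroyObject, and then calls down; note that
// the validation pass and the bookkeeping run under two separate acquisitions of global_lock.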
2540VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2541 bool skip_call = false;
2542 {
2543 std::lock_guard<std::mutex> lock(global_lock);
2544 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2545 skip_call |= ValidateNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2546 }
2547 if (skip_call) {
2548 return;
2549 }
2550 {
2551 std::lock_guard<std::mutex> lock(global_lock);
2552 DestroyObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2553 }
2554 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2555}
2556
2557VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2558 VkSurfaceKHR surface, VkBool32 *pSupported) {
2559 bool skip_call = false;
2560 {
2561 std::lock_guard<std::mutex> lock(global_lock);
2562 skip_call |=
2563 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2564 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2565 }
2566 if (skip_call) {
2567 return VK_ERROR_VALIDATION_FAILED_EXT;
2568 }
2569 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2570 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2571 return result;
2572}
2573
2574VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2575 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2576 bool skip_call = false;
2577 {
2578 std::lock_guard<std::mutex> lock(global_lock);
2579 skip_call |=
2580 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2581 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2582 }
2583 if (skip_call) {
2584 return VK_ERROR_VALIDATION_FAILED_EXT;
2585 }
2586 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2587 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2588 return result;
2589}
2590
2591VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2592 uint32_t *pSurfaceFormatCount,
2593 VkSurfaceFormatKHR *pSurfaceFormats) {
2594 bool skip_call = false;
2595 {
2596 std::lock_guard<std::mutex> lock(global_lock);
2597 skip_call |=
2598 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2599 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2600 }
2601 if (skip_call) {
2602 return VK_ERROR_VALIDATION_FAILED_EXT;
2603 }
2604 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2605 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2606 return result;
2607}
2608
2609VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2610 uint32_t *pPresentModeCount,
2611 VkPresentModeKHR *pPresentModes) {
2612 bool skip_call = false;
2613 {
2614 std::lock_guard<std::mutex> lock(global_lock);
2615 skip_call |=
2616 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2617 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2618 }
2619 if (skip_call) {
2620 return VK_ERROR_VALIDATION_FAILED_EXT;
2621 }
2622 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2623 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2624 return result;
2625}
2626
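// Object-creating WSI calls use the mirror-image pattern: validate the inputs (oldSwapchain may
// legitimately be VK_NULL_HANDLE, which is why its check passes 'true' for the final argument),
// call down, and register the new handle only when the driver reports VK_SUCCESS. Application-side
// sketch (illustrative only; the create-info fields are omitted):
//     VkSwapchainCreateInfoKHR info = {VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR};
//     VkSwapchainKHR swapchain;
//     vkCreateSwapchainKHR(device, &info, nullptr, &swapchain);  // tracked from this point on success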
2627VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2628 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2629 bool skip_call = false;
2630 {
2631 std::lock_guard<std::mutex> lock(global_lock);
2632 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2633 if (pCreateInfo) {
2634 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->oldSwapchain,
2635 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2636 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2637 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfo->surface,
2638 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2639 }
2640 }
2641 if (skip_call) {
2642 return VK_ERROR_VALIDATION_FAILED_EXT;
2643 }
2644 VkResult result =
2645 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2646 {
2647 std::lock_guard<std::mutex> lock(global_lock);
2648 if (result == VK_SUCCESS) {
2649 CreateNonDispatchableObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
2650 }
2651 }
2652 return result;
2653}
2654
2655VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2656 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2657 bool skip_call = false;
2658 {
2659 std::lock_guard<std::mutex> lock(global_lock);
2660 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2661 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2662 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2663 skip_call |= ValidateNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2664 }
2665 if (skip_call) {
2666 return VK_ERROR_VALIDATION_FAILED_EXT;
2667 }
2668 VkResult result = get_dispatch_table(ot_device_table_map, device)
2669 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2670 return result;
2671}
2672
2673VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2674 bool skip_call = false;
2675 {
2676 std::lock_guard<std::mutex> lock(global_lock);
2677 if (pPresentInfo) {
2678 if (pPresentInfo->pSwapchains) {
2679 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2680 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pSwapchains[idx0],
2681 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2682 }
2683 }
2684 if (pPresentInfo->pWaitSemaphores) {
2685 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2686 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2687 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2688 }
2689 }
2690 }
2691 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2692 }
2693 if (skip_call) {
2694 return VK_ERROR_VALIDATION_FAILED_EXT;
2695 }
2696 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2697 return result;
2698}
2699
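// Each platform-specific surface-creation hook below is guarded by its VK_USE_PLATFORM_*_KHR define
// and has the same shape: validate the instance, call down, and on VK_SUCCESS register the new
// VkSurfaceKHR so later uses of the surface can be cross-checked.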
2700#ifdef VK_USE_PLATFORM_WIN32_KHR
2701VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2702 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2703 bool skip_call = false;
2704 {
2705 std::lock_guard<std::mutex> lock(global_lock);
2706 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2707 }
2708 if (skip_call) {
2709 return VK_ERROR_VALIDATION_FAILED_EXT;
2710 }
2711 VkResult result =
2712 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2713 {
2714 std::lock_guard<std::mutex> lock(global_lock);
2715 if (result == VK_SUCCESS) {
2716 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2717 }
2718 }
2719 return result;
2720}
2721
2722VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2723 uint32_t queueFamilyIndex) {
2724 bool skip_call = false;
2725 {
2726 std::lock_guard<std::mutex> lock(global_lock);
2727 skip_call |=
2728 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2729 }
2730 if (skip_call) {
2731 return VK_FALSE;
2732 }
2733 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2734 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2735 return result;
2736}
2737#endif // VK_USE_PLATFORM_WIN32_KHR
2738
2739#ifdef VK_USE_PLATFORM_XCB_KHR
2740VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2741 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2742 bool skip_call = false;
2743 {
2744 std::lock_guard<std::mutex> lock(global_lock);
2745 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2746 }
2747 if (skip_call) {
2748 return VK_ERROR_VALIDATION_FAILED_EXT;
2749 }
2750 VkResult result =
2751 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2752 {
2753 std::lock_guard<std::mutex> lock(global_lock);
2754 if (result == VK_SUCCESS) {
2755 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2756 }
2757 }
2758 return result;
2759}
2760
2761VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2762 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2763 xcb_visualid_t visual_id) {
2764 bool skip_call = false;
2765 {
2766 std::lock_guard<std::mutex> lock(global_lock);
2767 skip_call |=
2768 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2769 }
2770 if (skip_call) {
2771 return VK_FALSE;
2772 }
2773 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2774 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2775 return result;
2776}
2777#endif // VK_USE_PLATFORM_XCB_KHR
2778
2779#ifdef VK_USE_PLATFORM_XLIB_KHR
2780VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2781 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2782 bool skip_call = false;
2783 {
2784 std::lock_guard<std::mutex> lock(global_lock);
2785 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2786 }
2787 if (skip_call) {
2788 return VK_ERROR_VALIDATION_FAILED_EXT;
2789 }
2790 VkResult result =
2791 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2792 {
2793 std::lock_guard<std::mutex> lock(global_lock);
2794 if (result == VK_SUCCESS) {
2795 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2796 }
2797 }
2798 return result;
2799}
2800
2801VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2802 uint32_t queueFamilyIndex, Display *dpy,
2803 VisualID visualID) {
2804 bool skip_call = false;
2805 {
2806 std::lock_guard<std::mutex> lock(global_lock);
2807 skip_call |=
2808 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2809 }
2810 if (skip_call) {
2811 return VK_FALSE;
2812 }
2813 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2814 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2815 return result;
2816}
2817#endif // VK_USE_PLATFORM_XLIB_KHR
2818
2819#ifdef VK_USE_PLATFORM_MIR_KHR
2820VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2821 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2822 bool skip_call = false;
2823 {
2824 std::lock_guard<std::mutex> lock(global_lock);
2825 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2826 }
2827 if (skip_call) {
2828 return VK_ERROR_VALIDATION_FAILED_EXT;
2829 }
2830 VkResult result =
2831 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2832 {
2833 std::lock_guard<std::mutex> lock(global_lock);
2834 if (result == VK_SUCCESS) {
2835 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2836 }
2837 }
2838 return result;
2839}
2840
2841VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2842 uint32_t queueFamilyIndex, MirConnection *connection) {
2843 bool skip_call = false;
2844 {
2845 std::lock_guard<std::mutex> lock(global_lock);
2846 skip_call |=
2847 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2848 }
2849 if (skip_call) {
2850 return VK_FALSE;
2851 }
2852 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2853 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2854 return result;
2855}
2856#endif // VK_USE_PLATFORM_MIR_KHR
2857
2858#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2859VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2860 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2861 bool skip_call = false;
2862 {
2863 std::lock_guard<std::mutex> lock(global_lock);
2864 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2865 }
2866 if (skip_call) {
2867 return VK_ERROR_VALIDATION_FAILED_EXT;
2868 }
2869 VkResult result =
2870 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2871 {
2872 std::lock_guard<std::mutex> lock(global_lock);
2873 if (result == VK_SUCCESS) {
2874 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2875 }
2876 }
2877 return result;
2878}
2879
2880VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2881 uint32_t queueFamilyIndex,
2882 struct wl_display *display) {
2883 bool skip_call = false;
2884 {
2885 std::lock_guard<std::mutex> lock(global_lock);
2886 skip_call |=
2887 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2888 }
2889 if (skip_call) {
2890 return VK_FALSE;
2891 }
2892 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2893 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2894 return result;
2895}
2896#endif // VK_USE_PLATFORM_WAYLAND_KHR
2897
2898#ifdef VK_USE_PLATFORM_ANDROID_KHR
2899VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2900 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2901 bool skip_call = false;
2902 {
2903 std::lock_guard<std::mutex> lock(global_lock);
2904 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2905 }
2906 if (skip_call) {
2907 return VK_ERROR_VALIDATION_FAILED_EXT;
2908 }
2909 VkResult result =
2910 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2911 {
2912 std::lock_guard<std::mutex> lock(global_lock);
2913 if (result == VK_SUCCESS) {
2914 CreateNonDispatchableObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2915 }
2916 }
2917 return result;
2918}
2919#endif // VK_USE_PLATFORM_ANDROID_KHR
2920
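// CreateSharedSwapchainsKHR (from VK_KHR_display_swapchain) is the array form of CreateSwapchainKHR:
// every element of pCreateInfos is validated, and on success every swapchain returned in pSwapchains
// is registered with the tracker.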
2921VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
2922 const VkSwapchainCreateInfoKHR *pCreateInfos,
2923 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
2924 bool skip_call = false;
2925 uint32_t i = 0;
2926 {
2927 std::lock_guard<std::mutex> lock(global_lock);
2928 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2929 if (NULL != pCreateInfos) {
2930 for (i = 0; i < swapchainCount; i++) {
2931 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[i].oldSwapchain,
2932 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2933 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2934 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfos[i].surface,
2935 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2936 }
2937 }
2938 }
2939 if (skip_call) {
2940 return VK_ERROR_VALIDATION_FAILED_EXT;
2941 }
2942 VkResult result =
2943 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
2944 {
2945 std::lock_guard<std::mutex> lock(global_lock);
2946 if (result == VK_SUCCESS) {
2947 for (i = 0; i < swapchainCount; i++) {
2948 CreateNonDispatchableObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
2949 }
2950 }
2951 }
2952 return result;
2953}
2954
2955VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2956 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2957 const VkAllocationCallbacks *pAllocator,
2958 VkDebugReportCallbackEXT *pCallback) {
2959 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2960 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
2961 if (VK_SUCCESS == result) {
2962 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2963 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
2964 CreateNonDispatchableObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
2965 }
2966 return result;
2967}
2968
2969VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
2970 const VkAllocationCallbacks *pAllocator) {
2971 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2972 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
2973 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2974 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
2975 DestroyObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
2976}
2977
2978VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
2979 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
2980 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
2981 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2982 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
2983}
2984
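// The layer advertises exactly one instance extension (VK_EXT_debug_report) and a single layer
// property record for itself; the Enumerate* entry points below serve these static tables and defer
// device-extension queries for other layer names to the driver.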
2985static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
2986
2987static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
2988 VK_LAYER_API_VERSION, // specVersion
2989 1, // implementationVersion
2990 "LunarG Validation Layer"};
2991
2992VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
2993 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2994}
2995
2996VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
2997 VkLayerProperties *pProperties) {
2998 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2999}
3000
3001VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
3002 VkExtensionProperties *pProperties) {
3003 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3004 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
3005
3006 return VK_ERROR_LAYER_NOT_PRESENT;
3007}
3008
3009VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
3010 uint32_t *pCount, VkExtensionProperties *pProperties) {
3011 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3012 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
3013
3014 assert(physicalDevice);
3015 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3016 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3017}
3018
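// The two helpers below back the layer's GetProcAddr paths: the first resolves debug-report entry
// points, while the second hands out the core WSI entry points only when VK_KHR_surface was enabled
// and the per-platform surface functions only when the matching platform extension was enabled.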
3019static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3020 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3021 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3022}
3023
3024static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
3025 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
3026 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
3027 return nullptr;
3028
3029 if (!strcmp("vkDestroySurfaceKHR", name))
3030 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
3031 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
3032 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
3033 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
3034 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
3035 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
3036 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
3037 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
3038 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);
3039
3040#ifdef VK_USE_PLATFORM_WIN32_KHR
3041 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
3042 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
3043 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
3044 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
3045#endif // VK_USE_PLATFORM_WIN32_KHR
3046#ifdef VK_USE_PLATFORM_XCB_KHR
3047 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
3048 return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
3049 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
3050 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
3051#endif // VK_USE_PLATFORM_XCB_KHR
3052#ifdef VK_USE_PLATFORM_XLIB_KHR
3053 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
3054 return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
3055 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
3056 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
3057#endif // VK_USE_PLATFORM_XLIB_KHR
3058#ifdef VK_USE_PLATFORM_MIR_KHR
3059 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
3060 return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
3061 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
3062 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
3063#endif // VK_USE_PLATFORM_MIR_KHR
3064#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3065 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
3066 return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
3067 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
3068 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
3069#endif // VK_USE_PLATFORM_WAYLAND_KHR
3070#ifdef VK_USE_PLATFORM_ANDROID_KHR
3071 if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
3072 return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
3073#endif // VK_USE_PLATFORM_ANDROID_KHR
3074
3075 return nullptr;
3076}
3077
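// The Check*RegisterExtensions helpers scan ppEnabledExtensionNames at device and instance creation
// and cache which WSI-related extensions were enabled, so the interception code above can refuse to
// return pointers for extensions the application never requested.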
3078static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3079 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3080 device_data->wsi_enabled = false;
3081 device_data->wsi_display_swapchain_enabled = false;
3082 device_data->objtrack_extensions_enabled = false;
3083
3084 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3085 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3086 device_data->wsi_enabled = true;
3087 }
3088 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) {
3089 device_data->wsi_display_swapchain_enabled = true;
3090 }
3091 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3092 device_data->objtrack_extensions_enabled = true;
3093 }
3094 }
3095}
3096
3097static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3098 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3099
3100
3101 instanceExtMap[pDisp] = {};
3102
3103 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3104 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3105 instanceExtMap[pDisp].wsi_enabled = true;
3106 }
3107#ifdef VK_USE_PLATFORM_XLIB_KHR
3108 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3109 instanceExtMap[pDisp].xlib_enabled = true;
3110 }
3111#endif
3112#ifdef VK_USE_PLATFORM_XCB_KHR
3113 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3114 instanceExtMap[pDisp].xcb_enabled = true;
3115 }
3116#endif
3117#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3118 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3119 instanceExtMap[pDisp].wayland_enabled = true;
3120 }
3121#endif
3122#ifdef VK_USE_PLATFORM_MIR_KHR
3123 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3124 instanceExtMap[pDisp].mir_enabled = true;
3125 }
3126#endif
3127#ifdef VK_USE_PLATFORM_ANDROID_KHR
3128 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3129 instanceExtMap[pDisp].android_enabled = true;
3130 }
3131#endif
3132#ifdef VK_USE_PLATFORM_WIN32_KHR
3133 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3134 instanceExtMap[pDisp].win32_enabled = true;
3135 }
3136#endif
3137 }
3138}
3139
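// CreateDevice shows the standard layer bootstrapping sequence: fetch the next layer's vkCreateDevice
// through the chain info, advance the chain, call down, then build this layer's per-device state
// (report data, dispatch table, extension flags) and start tracking the new VkDevice handle.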
3140VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
3141 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
3142 std::lock_guard<std::mutex> lock(global_lock);
3143 layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3144 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3145
3146 assert(chain_info->u.pLayerInfo);
3147 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3148 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
3149 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
3150 if (fpCreateDevice == NULL) {
3151 return VK_ERROR_INITIALIZATION_FAILED;
3152 }
3153
3154 // Advance the link info for the next element on the chain
3155 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3156
3157 VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
3158 if (result != VK_SUCCESS) {
3159 return result;
3160 }
3161
3162 layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
3163 device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);
3164
3165 // Add link back to physDev
3166 device_data->physical_device = physicalDevice;
3167
3168 initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);
3169
3170 CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
3171 CreateDispatchableObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);
3172
3173 return result;
3174}
3175
3176VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3177 uint32_t *pQueueFamilyPropertyCount,
3178 VkQueueFamilyProperties *pQueueFamilyProperties) {
3179 get_dispatch_table(ot_instance_table_map, physicalDevice)
3180 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3181 std::lock_guard<std::mutex> lock(global_lock);
3182 if (pQueueFamilyProperties != NULL) {
3183 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3184 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3185 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3186 }
3187 }
3188}
3189
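// CreateInstance mirrors CreateDevice: call down the chain first, then set up per-instance state,
// namely the temporary debug-report callbacks copied from pCreateInfo->pNext, the report_data used
// by log_msg, the object tracker itself, and the record of enabled WSI extensions.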
3190VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
3191 VkInstance *pInstance) {
3192 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3193
3194 assert(chain_info->u.pLayerInfo);
3195 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3196 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
3197 if (fpCreateInstance == NULL) {
3198 return VK_ERROR_INITIALIZATION_FAILED;
3199 }
3200
3201 // Advance the link info for the next element on the chain
3202 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3203
3204 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
3205 if (result != VK_SUCCESS) {
3206 return result;
3207 }
3208
3209 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
3210 instance_data->instance = *pInstance;
3211 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
3212 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);
3213
3214 // Look for one or more debug report create info structures, and copy the
3215 // callback(s) for each one found (for use by vkDestroyInstance)
3216 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
3217 &instance_data->tmp_callbacks);
3218
3219 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
3220 pCreateInfo->ppEnabledExtensionNames);
3221
3222 InitObjectTracker(instance_data, pAllocator);
3223 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);
3224
3225 CreateDispatchableObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
3226
3227 return result;
3228}
3229
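// Physical devices are owned by the instance, so EnumeratePhysicalDevices registers each returned
// VkPhysicalDevice as a dispatchable object; GetDeviceQueue below likewise records each queue both
// as a tracked object and in the per-device queue_info_map.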
3230VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3231 VkPhysicalDevice *pPhysicalDevices) {
3232 bool skip_call = VK_FALSE;
3233 std::unique_lock<std::mutex> lock(global_lock);
3234 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3235 lock.unlock();
3236 if (skip_call) {
3237 return VK_ERROR_VALIDATION_FAILED_EXT;
3238 }
3239 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3240 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3241 lock.lock();
3242 if (result == VK_SUCCESS) {
3243 if (pPhysicalDevices) {
3244 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3245 CreateDispatchableObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
3246 }
3247 }
3248 }
3249 lock.unlock();
3250 return result;
3251}
3252
3253VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3254 std::unique_lock<std::mutex> lock(global_lock);
3255 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3256 lock.unlock();
3257
3258 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3259
3260 lock.lock();
3261
3262 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3263 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3264}
3265
3266VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3267 std::unique_lock<std::mutex> lock(global_lock);
3268 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3269 lock.unlock();
3270
3271 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3272
3273 lock.lock();
3274 DestroyObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
3275}
3276
3277VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3278 VkMemoryMapFlags flags, void **ppData) {
3279 bool skip_call = VK_FALSE;
3280 std::unique_lock<std::mutex> lock(global_lock);
3281 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3282 lock.unlock();
3283 if (skip_call == VK_TRUE) {
3284 return VK_ERROR_VALIDATION_FAILED_EXT;
3285 }
3286 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3287 return result;
3288}
3289
3290VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3291 bool skip_call = VK_FALSE;
3292 std::unique_lock<std::mutex> lock(global_lock);
3293 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3294 lock.unlock();
3295 if (skip_call == VK_TRUE) {
3296 return;
3297 }
3298
3299 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3300}
3301VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3302 VkFence fence) {
3303 std::unique_lock<std::mutex> lock(global_lock);
3304 ValidateQueueFlags(queue, "QueueBindSparse");
3305
3306 for (uint32_t i = 0; i < bindInfoCount; i++) {
3307 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
3308 ValidateNonDispatchableObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3309 false);
3310 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
3311 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3312 false);
3313 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
3314 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
3315 }
3316 lock.unlock();
3317
3318 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3319 return result;
3320}
3321
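// Pooled objects get extra bookkeeping: AllocateCommandBuffers and AllocateDescriptorSets record each
// allocation against its parent pool (via AllocateCommandBuffer / AllocateDescriptorSet) so that later
// frees, and pool destruction, can verify the parent-child relationship.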
3322VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3323 VkCommandBuffer *pCommandBuffers) {
3324 bool skip_call = VK_FALSE;
3325 std::unique_lock<std::mutex> lock(global_lock);
3326 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3327 skip_call |=
3328 ValidateNonDispatchableObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3329 lock.unlock();
3330
3331 if (skip_call) {
3332 return VK_ERROR_VALIDATION_FAILED_EXT;
3333 }
3334
3335 VkResult result =
3336 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3337
3338 lock.lock();
3339 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3340 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3341 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3342 }
3343 lock.unlock();
3344
3345 return result;
3346}
3347
3348VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3349 VkDescriptorSet *pDescriptorSets) {
3350 bool skip_call = VK_FALSE;
3351 std::unique_lock<std::mutex> lock(global_lock);
3352 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3353 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->descriptorPool,
3354 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3355 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3356 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->pSetLayouts[i],
3357 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3358 }
3359 lock.unlock();
3360 if (skip_call) {
3361 return VK_ERROR_VALIDATION_FAILED_EXT;
3362 }
3363
3364 VkResult result =
3365 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3366
3367 if (VK_SUCCESS == result) {
3368 lock.lock();
3369 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3370 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3371 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3372 }
3373 lock.unlock();
3374 }
3375
3376 return result;
3377}
3378
3379VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3380 const VkCommandBuffer *pCommandBuffers) {
3381 bool skip_call = false;
3382 std::unique_lock<std::mutex> lock(global_lock);
3383 ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3384 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3385 for (uint32_t i = 0; i < commandBufferCount; i++) {
3386 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3387 }
3388
3389 for (uint32_t i = 0; i < commandBufferCount; i++) {
3390 DestroyObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3391 }
3392
3393 lock.unlock();
3394 if (!skip_call) {
3395 get_dispatch_table(ot_device_table_map, device)
3396 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3397 }
3398}
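// DestroySwapchainKHR also sweeps swapchainImageMap, because the images handed out by
// GetSwapchainImagesKHR belong to the swapchain and disappear with it; only then is the swapchain
// handle itself removed from the tracker and the call forwarded down the chain.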
3399VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3400 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3401 std::unique_lock<std::mutex> lock(global_lock);
3402 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3403 // Remove this swapchain's images from our map of such images.
3404 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3405 while (itr != device_data->swapchainImageMap.end()) {
3406 OBJTRACK_NODE *pNode = (*itr).second;
3407 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3408 delete pNode;
3409 auto delete_item = itr++;
3410 device_data->swapchainImageMap.erase(delete_item);
3411 } else {
3412 ++itr;
3413 }
3414 }
3415 DestroyObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
3416 lock.unlock();
3417
3418 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3419}
3420
3421VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3422 const VkDescriptorSet *pDescriptorSets) {
3423 bool skip_call = false;
3424 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3425 std::unique_lock<std::mutex> lock(global_lock);
3426 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3427 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3428 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3429 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3430 }
3431
3432 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3433 DestroyObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
3434 }
3435
3436 lock.unlock();
3437 if (!skip_call) {
3438 result = get_dispatch_table(ot_device_table_map, device)
3439 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3440 }
3441 return result;
3442}
3443
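// DestroyDescriptorPool and DestroyCommandPool walk the object map and retire every descriptor set or
// command buffer whose parent_object matches the pool being destroyed, since the API frees those
// children implicitly; the pool handle itself is removed last, before calling down.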
3444VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3445 const VkAllocationCallbacks *pAllocator) {
3446 bool skip_call = VK_FALSE;
3447 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3448 std::unique_lock<std::mutex> lock(global_lock);
3449 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3450 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3451 lock.unlock();
3452 if (skip_call) {
3453 return;
3454 }
3455 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
3456 // Remove this pool's descriptor sets from our descriptorSet map.
3457 lock.lock();
3458 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
3459 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
3460 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
3461 OBJTRACK_NODE *pNode = (*itr).second;
3462 auto del_itr = itr++;
3463 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
3464 DestroyObject(device, (VkDescriptorSet)((*del_itr).first),
3465 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
3466 }
3467 }
3468 DestroyObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
3469 lock.unlock();
3470 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
3471}
3472
3473VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
3474 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3475 bool skip_call = false;
3476 std::unique_lock<std::mutex> lock(global_lock);
3477 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3478 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3479 lock.unlock();
3480 if (skip_call) {
3481 return;
3482 }
3483 lock.lock();
3484 // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
3485 // Remove this pool's cmdBuffers from our cmd buffer map.
3486 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
3487 auto del_itr = itr;
3488 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
3489 OBJTRACK_NODE *pNode = (*itr).second;
3490 del_itr = itr++;
3491 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
3492 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
3493 DestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
3494 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3495 }
3496 }
Chris Forbesec461992016-09-29 14:41:44 +13003497 DestroyObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003498 lock.unlock();
3499 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
3500}
3501
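// After the driver fills in pSwapchainImages, register each returned image as an object owned by the swapchain so
// later uses of those images can be validated.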
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    if (pSwapchainImages != NULL) {
        lock.lock();
        for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
            CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
        }
        lock.unlock();
    }
    return result;
}

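// Validate every handle referenced by each VkGraphicsPipelineCreateInfo (base pipeline, layout, shader modules,
// render pass) and the optional pipeline cache before dispatching, then track each pipeline created on success.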
VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    if (pCreateInfos) {
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            if (pCreateInfos[idx0].basePipelineHandle) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
            }
            if (pCreateInfos[idx0].layout) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
            }
            if (pCreateInfos[idx0].pStages) {
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].pStages[idx1].module,
                                                                   VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].renderPass,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
            }
        }
    }
    if (pipelineCache) {
        skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
    }
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    lock.lock();
    if (result == VK_SUCCESS) {
        for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
            CreateNonDispatchableObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
        }
    }
    lock.unlock();
    return result;
}

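// Same pattern as CreateGraphicsPipelines: validate the handles referenced by each VkComputePipelineCreateInfo and
// the optional pipeline cache, dispatch, then track the pipelines created on success.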
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                      const VkComputePipelineCreateInfo *pCreateInfos,
                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    bool skip_call = false;
    std::unique_lock<std::mutex> lock(global_lock);
    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    if (pCreateInfos) {
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            if (pCreateInfos[idx0].basePipelineHandle) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
            }
            if (pCreateInfos[idx0].layout) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
            }
            if (pCreateInfos[idx0].stage.module) {
                skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].stage.module,
                                                           VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
            }
        }
    }
    if (pipelineCache) {
        skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
    }
    lock.unlock();
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    lock.lock();
    if (result == VK_SUCCESS) {
        for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
            CreateNonDispatchableObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
        }
    }
    lock.unlock();
    return result;
}

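// Returns this layer's intercept for a core device-level entry point, or NULL if the name is not one we intercept.
// Names are compared after stripping the leading "vk" prefix.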
static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "GetDeviceProcAddr"))
        return (PFN_vkVoidFunction)GetDeviceProcAddr;
    if (!strcmp(name, "DestroyDevice"))
        return (PFN_vkVoidFunction)DestroyDevice;
    if (!strcmp(name, "GetDeviceQueue"))
        return (PFN_vkVoidFunction)GetDeviceQueue;
    if (!strcmp(name, "QueueSubmit"))
        return (PFN_vkVoidFunction)QueueSubmit;
    if (!strcmp(name, "QueueWaitIdle"))
        return (PFN_vkVoidFunction)QueueWaitIdle;
    if (!strcmp(name, "DeviceWaitIdle"))
        return (PFN_vkVoidFunction)DeviceWaitIdle;
    if (!strcmp(name, "AllocateMemory"))
        return (PFN_vkVoidFunction)AllocateMemory;
    if (!strcmp(name, "FreeMemory"))
        return (PFN_vkVoidFunction)FreeMemory;
    if (!strcmp(name, "MapMemory"))
        return (PFN_vkVoidFunction)MapMemory;
    if (!strcmp(name, "UnmapMemory"))
        return (PFN_vkVoidFunction)UnmapMemory;
    if (!strcmp(name, "FlushMappedMemoryRanges"))
        return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
    if (!strcmp(name, "InvalidateMappedMemoryRanges"))
        return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
    if (!strcmp(name, "GetDeviceMemoryCommitment"))
        return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
    if (!strcmp(name, "BindBufferMemory"))
        return (PFN_vkVoidFunction)BindBufferMemory;
    if (!strcmp(name, "BindImageMemory"))
        return (PFN_vkVoidFunction)BindImageMemory;
    if (!strcmp(name, "GetBufferMemoryRequirements"))
        return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
    if (!strcmp(name, "GetImageMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageMemoryRequirements;
    if (!strcmp(name, "GetImageSparseMemoryRequirements"))
        return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
    if (!strcmp(name, "QueueBindSparse"))
        return (PFN_vkVoidFunction)QueueBindSparse;
    if (!strcmp(name, "CreateFence"))
        return (PFN_vkVoidFunction)CreateFence;
    if (!strcmp(name, "DestroyFence"))
        return (PFN_vkVoidFunction)DestroyFence;
    if (!strcmp(name, "ResetFences"))
        return (PFN_vkVoidFunction)ResetFences;
    if (!strcmp(name, "GetFenceStatus"))
        return (PFN_vkVoidFunction)GetFenceStatus;
    if (!strcmp(name, "WaitForFences"))
        return (PFN_vkVoidFunction)WaitForFences;
    if (!strcmp(name, "CreateSemaphore"))
        return (PFN_vkVoidFunction)CreateSemaphore;
    if (!strcmp(name, "DestroySemaphore"))
        return (PFN_vkVoidFunction)DestroySemaphore;
    if (!strcmp(name, "CreateEvent"))
        return (PFN_vkVoidFunction)CreateEvent;
    if (!strcmp(name, "DestroyEvent"))
        return (PFN_vkVoidFunction)DestroyEvent;
    if (!strcmp(name, "GetEventStatus"))
        return (PFN_vkVoidFunction)GetEventStatus;
    if (!strcmp(name, "SetEvent"))
        return (PFN_vkVoidFunction)SetEvent;
    if (!strcmp(name, "ResetEvent"))
        return (PFN_vkVoidFunction)ResetEvent;
    if (!strcmp(name, "CreateQueryPool"))
        return (PFN_vkVoidFunction)CreateQueryPool;
    if (!strcmp(name, "DestroyQueryPool"))
        return (PFN_vkVoidFunction)DestroyQueryPool;
    if (!strcmp(name, "GetQueryPoolResults"))
        return (PFN_vkVoidFunction)GetQueryPoolResults;
    if (!strcmp(name, "CreateBuffer"))
        return (PFN_vkVoidFunction)CreateBuffer;
    if (!strcmp(name, "DestroyBuffer"))
        return (PFN_vkVoidFunction)DestroyBuffer;
    if (!strcmp(name, "CreateBufferView"))
        return (PFN_vkVoidFunction)CreateBufferView;
    if (!strcmp(name, "DestroyBufferView"))
        return (PFN_vkVoidFunction)DestroyBufferView;
    if (!strcmp(name, "CreateImage"))
        return (PFN_vkVoidFunction)CreateImage;
    if (!strcmp(name, "DestroyImage"))
        return (PFN_vkVoidFunction)DestroyImage;
    if (!strcmp(name, "GetImageSubresourceLayout"))
        return (PFN_vkVoidFunction)GetImageSubresourceLayout;
    if (!strcmp(name, "CreateImageView"))
        return (PFN_vkVoidFunction)CreateImageView;
    if (!strcmp(name, "DestroyImageView"))
        return (PFN_vkVoidFunction)DestroyImageView;
    if (!strcmp(name, "CreateShaderModule"))
        return (PFN_vkVoidFunction)CreateShaderModule;
    if (!strcmp(name, "DestroyShaderModule"))
        return (PFN_vkVoidFunction)DestroyShaderModule;
    if (!strcmp(name, "CreatePipelineCache"))
        return (PFN_vkVoidFunction)CreatePipelineCache;
    if (!strcmp(name, "DestroyPipelineCache"))
        return (PFN_vkVoidFunction)DestroyPipelineCache;
    if (!strcmp(name, "GetPipelineCacheData"))
        return (PFN_vkVoidFunction)GetPipelineCacheData;
    if (!strcmp(name, "MergePipelineCaches"))
        return (PFN_vkVoidFunction)MergePipelineCaches;
    if (!strcmp(name, "CreateGraphicsPipelines"))
        return (PFN_vkVoidFunction)CreateGraphicsPipelines;
    if (!strcmp(name, "CreateComputePipelines"))
        return (PFN_vkVoidFunction)CreateComputePipelines;
    if (!strcmp(name, "DestroyPipeline"))
        return (PFN_vkVoidFunction)DestroyPipeline;
    if (!strcmp(name, "CreatePipelineLayout"))
        return (PFN_vkVoidFunction)CreatePipelineLayout;
    if (!strcmp(name, "DestroyPipelineLayout"))
        return (PFN_vkVoidFunction)DestroyPipelineLayout;
    if (!strcmp(name, "CreateSampler"))
        return (PFN_vkVoidFunction)CreateSampler;
    if (!strcmp(name, "DestroySampler"))
        return (PFN_vkVoidFunction)DestroySampler;
    if (!strcmp(name, "CreateDescriptorSetLayout"))
        return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
    if (!strcmp(name, "DestroyDescriptorSetLayout"))
        return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
    if (!strcmp(name, "CreateDescriptorPool"))
        return (PFN_vkVoidFunction)CreateDescriptorPool;
    if (!strcmp(name, "DestroyDescriptorPool"))
        return (PFN_vkVoidFunction)DestroyDescriptorPool;
    if (!strcmp(name, "ResetDescriptorPool"))
        return (PFN_vkVoidFunction)ResetDescriptorPool;
    if (!strcmp(name, "AllocateDescriptorSets"))
        return (PFN_vkVoidFunction)AllocateDescriptorSets;
    if (!strcmp(name, "FreeDescriptorSets"))
        return (PFN_vkVoidFunction)FreeDescriptorSets;
    if (!strcmp(name, "UpdateDescriptorSets"))
        return (PFN_vkVoidFunction)UpdateDescriptorSets;
    if (!strcmp(name, "CreateFramebuffer"))
        return (PFN_vkVoidFunction)CreateFramebuffer;
    if (!strcmp(name, "DestroyFramebuffer"))
        return (PFN_vkVoidFunction)DestroyFramebuffer;
    if (!strcmp(name, "CreateRenderPass"))
        return (PFN_vkVoidFunction)CreateRenderPass;
    if (!strcmp(name, "DestroyRenderPass"))
        return (PFN_vkVoidFunction)DestroyRenderPass;
    if (!strcmp(name, "GetRenderAreaGranularity"))
        return (PFN_vkVoidFunction)GetRenderAreaGranularity;
    if (!strcmp(name, "CreateCommandPool"))
        return (PFN_vkVoidFunction)CreateCommandPool;
    if (!strcmp(name, "DestroyCommandPool"))
        return (PFN_vkVoidFunction)DestroyCommandPool;
    if (!strcmp(name, "ResetCommandPool"))
        return (PFN_vkVoidFunction)ResetCommandPool;
    if (!strcmp(name, "AllocateCommandBuffers"))
        return (PFN_vkVoidFunction)AllocateCommandBuffers;
    if (!strcmp(name, "FreeCommandBuffers"))
        return (PFN_vkVoidFunction)FreeCommandBuffers;
    if (!strcmp(name, "BeginCommandBuffer"))
        return (PFN_vkVoidFunction)BeginCommandBuffer;
    if (!strcmp(name, "EndCommandBuffer"))
        return (PFN_vkVoidFunction)EndCommandBuffer;
    if (!strcmp(name, "ResetCommandBuffer"))
        return (PFN_vkVoidFunction)ResetCommandBuffer;
    if (!strcmp(name, "CmdBindPipeline"))
        return (PFN_vkVoidFunction)CmdBindPipeline;
    if (!strcmp(name, "CmdSetViewport"))
        return (PFN_vkVoidFunction)CmdSetViewport;
    if (!strcmp(name, "CmdSetScissor"))
        return (PFN_vkVoidFunction)CmdSetScissor;
    if (!strcmp(name, "CmdSetLineWidth"))
        return (PFN_vkVoidFunction)CmdSetLineWidth;
    if (!strcmp(name, "CmdSetDepthBias"))
        return (PFN_vkVoidFunction)CmdSetDepthBias;
    if (!strcmp(name, "CmdSetBlendConstants"))
        return (PFN_vkVoidFunction)CmdSetBlendConstants;
    if (!strcmp(name, "CmdSetDepthBounds"))
        return (PFN_vkVoidFunction)CmdSetDepthBounds;
    if (!strcmp(name, "CmdSetStencilCompareMask"))
        return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
    if (!strcmp(name, "CmdSetStencilWriteMask"))
        return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
    if (!strcmp(name, "CmdSetStencilReference"))
        return (PFN_vkVoidFunction)CmdSetStencilReference;
    if (!strcmp(name, "CmdBindDescriptorSets"))
        return (PFN_vkVoidFunction)CmdBindDescriptorSets;
    if (!strcmp(name, "CmdBindIndexBuffer"))
        return (PFN_vkVoidFunction)CmdBindIndexBuffer;
    if (!strcmp(name, "CmdBindVertexBuffers"))
        return (PFN_vkVoidFunction)CmdBindVertexBuffers;
    if (!strcmp(name, "CmdDraw"))
        return (PFN_vkVoidFunction)CmdDraw;
    if (!strcmp(name, "CmdDrawIndexed"))
        return (PFN_vkVoidFunction)CmdDrawIndexed;
    if (!strcmp(name, "CmdDrawIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndirect;
    if (!strcmp(name, "CmdDrawIndexedIndirect"))
        return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
    if (!strcmp(name, "CmdDispatch"))
        return (PFN_vkVoidFunction)CmdDispatch;
    if (!strcmp(name, "CmdDispatchIndirect"))
        return (PFN_vkVoidFunction)CmdDispatchIndirect;
    if (!strcmp(name, "CmdCopyBuffer"))
        return (PFN_vkVoidFunction)CmdCopyBuffer;
    if (!strcmp(name, "CmdCopyImage"))
        return (PFN_vkVoidFunction)CmdCopyImage;
    if (!strcmp(name, "CmdBlitImage"))
        return (PFN_vkVoidFunction)CmdBlitImage;
    if (!strcmp(name, "CmdCopyBufferToImage"))
        return (PFN_vkVoidFunction)CmdCopyBufferToImage;
    if (!strcmp(name, "CmdCopyImageToBuffer"))
        return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
    if (!strcmp(name, "CmdUpdateBuffer"))
        return (PFN_vkVoidFunction)CmdUpdateBuffer;
    if (!strcmp(name, "CmdFillBuffer"))
        return (PFN_vkVoidFunction)CmdFillBuffer;
    if (!strcmp(name, "CmdClearColorImage"))
        return (PFN_vkVoidFunction)CmdClearColorImage;
    if (!strcmp(name, "CmdClearDepthStencilImage"))
        return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
    if (!strcmp(name, "CmdClearAttachments"))
        return (PFN_vkVoidFunction)CmdClearAttachments;
    if (!strcmp(name, "CmdResolveImage"))
        return (PFN_vkVoidFunction)CmdResolveImage;
    if (!strcmp(name, "CmdSetEvent"))
        return (PFN_vkVoidFunction)CmdSetEvent;
    if (!strcmp(name, "CmdResetEvent"))
        return (PFN_vkVoidFunction)CmdResetEvent;
    if (!strcmp(name, "CmdWaitEvents"))
        return (PFN_vkVoidFunction)CmdWaitEvents;
    if (!strcmp(name, "CmdPipelineBarrier"))
        return (PFN_vkVoidFunction)CmdPipelineBarrier;
    if (!strcmp(name, "CmdBeginQuery"))
        return (PFN_vkVoidFunction)CmdBeginQuery;
    if (!strcmp(name, "CmdEndQuery"))
        return (PFN_vkVoidFunction)CmdEndQuery;
    if (!strcmp(name, "CmdResetQueryPool"))
        return (PFN_vkVoidFunction)CmdResetQueryPool;
    if (!strcmp(name, "CmdWriteTimestamp"))
        return (PFN_vkVoidFunction)CmdWriteTimestamp;
    if (!strcmp(name, "CmdCopyQueryPoolResults"))
        return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
    if (!strcmp(name, "CmdPushConstants"))
        return (PFN_vkVoidFunction)CmdPushConstants;
    if (!strcmp(name, "CmdBeginRenderPass"))
        return (PFN_vkVoidFunction)CmdBeginRenderPass;
    if (!strcmp(name, "CmdNextSubpass"))
        return (PFN_vkVoidFunction)CmdNextSubpass;
    if (!strcmp(name, "CmdEndRenderPass"))
        return (PFN_vkVoidFunction)CmdEndRenderPass;
    if (!strcmp(name, "CmdExecuteCommands"))
        return (PFN_vkVoidFunction)CmdExecuteCommands;

    return NULL;
}

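// Returns this layer's intercept for a core instance-level entry point, or NULL if the name is not intercepted.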
static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
    if (!name || name[0] != 'v' || name[1] != 'k')
        return NULL;

    name += 2;
    if (!strcmp(name, "CreateInstance"))
        return (PFN_vkVoidFunction)CreateInstance;
    if (!strcmp(name, "DestroyInstance"))
        return (PFN_vkVoidFunction)DestroyInstance;
    if (!strcmp(name, "EnumeratePhysicalDevices"))
        return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
    if (!strcmp(name, "GetPhysicalDeviceFeatures"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
    if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
    if (!strcmp(name, "GetPhysicalDeviceProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
    if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
    if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
    if (!strcmp(name, "GetInstanceProcAddr"))
        return (PFN_vkVoidFunction)GetInstanceProcAddr;
    if (!strcmp(name, "CreateDevice"))
        return (PFN_vkVoidFunction)CreateDevice;
    if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
    if (!strcmp(name, "EnumerateInstanceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
    if (!strcmp(name, "EnumerateDeviceLayerProperties"))
        return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
    if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
        return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;

    return NULL;
}

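// WSI entry points are only intercepted when the corresponding extension (VK_KHR_swapchain or
// VK_KHR_display_swapchain) was enabled on the device; otherwise resolution falls through to the next handler.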
static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
    if (device) {
        layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

        if (device_data->wsi_enabled) {
            if (!strcmp("vkCreateSwapchainKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
            if (!strcmp("vkDestroySwapchainKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
            if (!strcmp("vkGetSwapchainImagesKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
            if (!strcmp("vkAcquireNextImageKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
            if (!strcmp("vkQueuePresentKHR", name))
                return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
        }

        if (device_data->wsi_display_swapchain_enabled) {
            if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
                return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
            }
        }
    }

    return nullptr;
}

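// Device-level queries resolve in this order: the layer's core device intercepts, then its WSI intercepts, then the
// next layer or driver in the chain.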
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreDeviceCommand(funcName);
    if (addr) {
        return addr;
    }
    assert(device);

    addr = InterceptWsiEnabledCommand(funcName, device);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
}

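// Instance-level queries also resolve core device commands so they can be retrieved through vkGetInstanceProcAddr;
// after the layer's own intercepts (core, WSI, debug report) are exhausted, the call falls through to the next layer.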
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;
    addr = InterceptCoreInstanceCommand(funcName);
    if (!addr) {
        addr = InterceptCoreDeviceCommand(funcName);
    }
    if (!addr) {
        addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
    }
    if (addr) {
        return addr;
    }
    assert(instance);

    addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    addr = InterceptWsiEnabledCommand(funcName, instance);
    if (addr) {
        return addr;
    }
    if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
}

} // namespace object_tracker

// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

// Loader-layer interface v0, just wrappers since there is only a layer
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return object_tracker::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return object_tracker::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    // The layer command handles VK_NULL_HANDLE just fine internally
    assert(physicalDevice == VK_NULL_HANDLE);
    return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}