 1/*
2 * Copyright (c) 2015-2016 The Khronos Group Inc.
3 * Copyright (c) 2015-2016 Valve Corporation
4 * Copyright (c) 2015-2016 LunarG, Inc.
5 * Copyright (c) 2015-2016 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Mark Lobodzinski <mark@lunarg.com>
20 * Author: Tobin Ehlis <tobine@google.com>
21 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
22 * Author: Jon Ashburn <jon@lunarg.com>
23 * Author: Mike Stroyan <stroyan@google.com>
24 * Author: Tony Barbour <tony@LunarG.com>
25 */
26
27#include "vk_loader_platform.h"
28#include "vulkan/vulkan.h"
29
30#include <cinttypes>
31#include <stdio.h>
32#include <stdlib.h>
33#include <string.h>
34
35#include <unordered_map>
36
37#include "vk_layer_config.h"
38#include "vk_layer_data.h"
39#include "vk_layer_logging.h"
40#include "vk_layer_table.h"
41#include "vulkan/vk_layer.h"
42
43#include "object_tracker.h"
44
45namespace object_tracker {
46
47static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
48
49 layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
50}
51
52// Add new queue to head of global queue list
53static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
54 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
55 auto queueItem = device_data->queue_info_map.find(queue);
56 if (queueItem == device_data->queue_info_map.end()) {
57 OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
58 if (p_queue_info != NULL) {
59 memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
60 p_queue_info->queue = queue;
61 p_queue_info->queue_node_index = queue_node_index;
62 device_data->queue_info_map[queue] = p_queue_info;
63 } else {
64 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
65 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
66 "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
67 }
68 }
69}
70
71// Destroy memRef lists and free all memory
72static void DestroyQueueDataStructures(VkDevice device) {
73 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
74
75 for (auto queue_item : device_data->queue_info_map) {
76 delete queue_item.second;
77 }
78 device_data->queue_info_map.clear();
79
80 // Destroy the items in the queue map
81 auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
82 while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
83 uint32_t obj_index = queue->second->object_type;
84 assert(device_data->num_total_objects > 0);
85 device_data->num_total_objects--;
86 assert(device_data->num_objects[obj_index] > 0);
87 device_data->num_objects[obj_index]--;
88 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
89 __LINE__, OBJTRACK_NONE, LayerName,
90 "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
91 queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
92 delete queue->second;
93 queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
94 }
95}
96
97// Check Queue type flags for selected queue operations
98static void ValidateQueueFlags(VkQueue queue, const char *function) {
99 layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
100 auto queue_item = device_data->queue_info_map.find(queue);
101 if (queue_item != device_data->queue_info_map.end()) {
102 OT_QUEUE_INFO *pQueueInfo = queue_item->second;
103 if (pQueueInfo != NULL) {
104 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
105 if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
106 0) {
107 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
108 reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
109 "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
110 }
111 }
112 }
113}
114
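// Add a tracking record for a newly allocated command buffer, noting its parent command pool and whether it is a secondary buffer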
115static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
116 VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
117 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
118
119 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<const uint64_t>(command_buffer),
120 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
121 string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));
122
123 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
124 pNewObjNode->object_type = object_type;
125 pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
126 pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
127 if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
128 pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
129 } else {
130 pNewObjNode->status = OBJSTATUS_NONE;
131 }
132 device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
133 device_data->num_objects[object_type]++;
134 device_data->num_total_objects++;
135}
136
137static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
138 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
139 bool skip_call = false;
140 uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
141 if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
142 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
143 OBJTRACK_NODE *pNode =
144 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
145
146 if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
147 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
148 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
149 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
150 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
151 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
152 reinterpret_cast<uint64_t &>(command_pool));
153 }
154 } else {
 155 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
156 __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove command buffer obj 0x%" PRIxLEAST64
157 ". Was it created? Has it already been destroyed?",
158 object_handle);
 159 }
160 return skip_call;
161}
162
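// Add a tracking record for a newly allocated descriptor set, noting the descriptor pool it came from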
163static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
164 VkDebugReportObjectTypeEXT object_type) {
165 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
166
167 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
168 reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
169 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
170 reinterpret_cast<uint64_t &>(descriptor_set));
171
172 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
173 pNewObjNode->object_type = object_type;
174 pNewObjNode->status = OBJSTATUS_NONE;
175 pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
176 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
177 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
178 pNewObjNode;
179 device_data->num_objects[object_type]++;
180 device_data->num_total_objects++;
181}
182
183static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
184 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
185 bool skip_call = false;
186 uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
187 auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
188 if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
189 OBJTRACK_NODE *pNode = dsItem->second;
190
191 if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
192 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
193 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
194 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
195 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
196 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
197 reinterpret_cast<uint64_t &>(descriptor_pool));
198 }
199 } else {
 200 skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
201 __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove descriptor set obj 0x%" PRIxLEAST64
202 ". Was it created? Has it already been destroyed?",
203 object_handle);
 204 }
205 return skip_call;
206}
207
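// Add a tracking record for a queue retrieved from the device, reusing an existing entry if the queue was already seen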
208static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
209 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
210
211 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
212 OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
213 object_name[object_type], reinterpret_cast<uint64_t>(vkObj));
214
215 OBJTRACK_NODE *p_obj_node = NULL;
216 auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
217 if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
218 p_obj_node = new OBJTRACK_NODE;
219 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
220 device_data->num_objects[object_type]++;
221 device_data->num_total_objects++;
222 } else {
223 p_obj_node = queue_item->second;
224 }
225 p_obj_node->object_type = object_type;
226 p_obj_node->status = OBJSTATUS_NONE;
227 p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
228}
229
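// Track a swapchain image in the separate swapchain-image map, tied to its parent swapchain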
230static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
231 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
232 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
233 reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
234 "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
235 reinterpret_cast<uint64_t &>(swapchain_image));
236
237 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
238 pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
239 pNewObjNode->status = OBJSTATUS_NONE;
240 pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
241 pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
242 device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
243}
244
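// Add a tracking record for a newly created object and bump the per-type and total object counts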
245template <typename T1, typename T2>
 246static void CreateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
 247 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
 248 auto object_handle = reinterpret_cast<uint64_t &>(object);
 249
 250 log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, object_handle,
 251 __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
 252 object_name[object_type], object_handle);
 253
 254 OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
 255 pNewObjNode->object_type = object_type;
 256 pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
 257 pNewObjNode->handle = object_handle;
 258 instance_data->object_map[object_type][object_handle] = pNewObjNode;
 259 instance_data->num_objects[object_type]++;
260 instance_data->num_total_objects++;
261}
262
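// Remove the tracking record for a destroyed object, reporting an allocator mismatch or an unknown handle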
263template <typename T1, typename T2>
 264static void DestroyObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool custom_allocator) {
 265 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
266
267 uint64_t object_handle = reinterpret_cast<uint64_t &>(object);
268
269 auto item = device_data->object_map[object_type].find(object_handle);
270 if (item != device_data->object_map[object_type].end()) {
271
272 OBJTRACK_NODE *pNode = item->second;
273 assert(device_data->num_total_objects > 0);
274 device_data->num_total_objects--;
275 assert(device_data->num_objects[pNode->object_type] > 0);
276 device_data->num_objects[pNode->object_type]--;
277
278 log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
279 OBJTRACK_NONE, LayerName,
280 "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
281 object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
282 device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);
283
 284 auto allocated_with_custom = pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR;
285 if (custom_allocator ^ allocated_with_custom) {
286 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
287 OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
288 "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
289 (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
290 (allocated_with_custom ? "" : "not "));
291 }
292
 293 delete pNode;
294 device_data->object_map[object_type].erase(item);
295 } else {
296 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
297 OBJTRACK_UNKNOWN_OBJECT, LayerName,
 298 "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
 299 object_name[object_type], object_handle);
 300 }
301}
302
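// Verify that a dispatchable handle (instance, device, queue, command buffer) is currently tracked; returns true if an error was logged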
303template <typename T1, typename T2>
304static bool ValidateDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
305 bool null_allowed) {
306 if (null_allowed && (object == VK_NULL_HANDLE)) {
307 return false;
308 }
309 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
310
311 if (instance_data->object_map[object_type].find(reinterpret_cast<uint64_t>(object)) ==
312 instance_data->object_map[object_type].end()) {
313 return log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, reinterpret_cast<uint64_t>(object),
 314 __LINE__, OBJTRACK_INVALID_OBJECT, LayerName, "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type],
 315 reinterpret_cast<uint64_t>(object));
316 }
317 return false;
318}
319
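// Verify that a non-dispatchable handle is currently tracked; images are also checked against the swapchain-image map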
320template <typename T1, typename T2>
321static bool ValidateNonDispatchableObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
322 bool null_allowed) {
323 if (null_allowed && (object == VK_NULL_HANDLE)) {
324 return false;
325 }
326 layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
327 if (device_data->object_map[object_type].find(reinterpret_cast<uint64_t &>(object)) ==
328 device_data->object_map[object_type].end()) {
329 // If object is an image, also look for it in the swapchain image map
330 if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
331 (device_data->swapchainImageMap.find(reinterpret_cast<uint64_t &>(object)) == device_data->swapchainImageMap.end())) {
332 return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
333 reinterpret_cast<uint64_t &>(object), __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
 334 "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type], reinterpret_cast<uint64_t &>(object));
 335 }
336 }
337 return false;
338}
339
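// Report each remaining object of the given type as a leak at device-destroy time and remove it from the map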
340static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
341 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
342 for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
343 OBJTRACK_NODE *object_info = item->second;
344 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
345 OBJTRACK_OBJECT_LEAK, LayerName,
346 "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
347 reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
348 item = device_data->object_map[object_type].erase(item);
349 }
350}
351
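// Instance teardown: report any leaked devices and their child objects, destroy the instance record, then clean up layer state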
352VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
353 std::unique_lock<std::mutex> lock(global_lock);
354
355 dispatch_key key = get_dispatch_key(instance);
356 layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
357
358 // Enable the temporary callback(s) here to catch cleanup issues:
359 bool callback_setup = false;
360 if (instance_data->num_tmp_callbacks > 0) {
361 if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
362 instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
363 callback_setup = true;
364 }
365 }
366
367 ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
368
 369 DestroyObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
 370 // Report any remaining objects in LL
371
372 for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
373 iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
374 OBJTRACK_NODE *pNode = iit->second;
375
376 VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);
377
378 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
379 OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
380 string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
381 // Semaphore:
382 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
383 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
384 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
385 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
386 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
387 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
388 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
389 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
390 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
391 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
392 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
393 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
394 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
395 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
396 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
397 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
398 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
399 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
400 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
401 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
402 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
403 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
404 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
405 }
406 instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();
407
408 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
409 pInstanceTable->DestroyInstance(instance, pAllocator);
410
411 // Disable and cleanup the temporary callback(s):
412 if (callback_setup) {
413 layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
414 }
415 if (instance_data->num_tmp_callbacks > 0) {
416 layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
417 instance_data->num_tmp_callbacks = 0;
418 }
419
420 // Clean up logging callback, if any
421 while (instance_data->logging_callback.size() > 0) {
422 VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
423 layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
424 instance_data->logging_callback.pop_back();
425 }
426
427 layer_debug_report_destroy_instance(instance_data->report_data);
428 layer_data_map.erase(key);
429
430 instanceExtMap.erase(pInstanceTable);
431 lock.unlock();
432 ot_instance_table_map.erase(key);
433}
434
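// Device teardown: destroy the device record, report leaked child objects, free queue bookkeeping, then call down the chain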
435VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
436
437 std::unique_lock<std::mutex> lock(global_lock);
438 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
 439 DestroyObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);
 440
441 // Report any remaining objects associated with this VkDevice object in LL
442 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
443 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
444 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
445 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
446 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
447 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
448 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
449 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
450 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
451 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
452 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
453 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
454 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
455 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
456 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
457 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
458 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
459 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
460 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
461 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
462 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
463 DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
464 // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
465
466 // Clean up Queue's MemRef Linked Lists
467 DestroyQueueDataStructures(device);
468
469 lock.unlock();
470
471 dispatch_key key = get_dispatch_key(device);
472 VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
473 pDisp->DestroyDevice(device, pAllocator);
474 ot_device_table_map.erase(key);
475}
476
477VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
478 bool skip_call = false;
479 {
480 std::lock_guard<std::mutex> lock(global_lock);
481 skip_call |=
482 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
483 }
484 if (skip_call) {
485 return;
486 }
487 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
488}
489
490VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
491 VkFormatProperties *pFormatProperties) {
492 bool skip_call = false;
493 {
494 std::lock_guard<std::mutex> lock(global_lock);
495 skip_call |=
496 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
497 }
498 if (skip_call) {
499 return;
500 }
501 get_dispatch_table(ot_instance_table_map, physicalDevice)
502 ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
503}
504
505VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
506 VkImageType type, VkImageTiling tiling,
507 VkImageUsageFlags usage, VkImageCreateFlags flags,
508 VkImageFormatProperties *pImageFormatProperties) {
509 bool skip_call = false;
510 {
511 std::lock_guard<std::mutex> lock(global_lock);
512 skip_call |=
513 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
514 }
515 if (skip_call) {
516 return VK_ERROR_VALIDATION_FAILED_EXT;
517 }
518 VkResult result =
519 get_dispatch_table(ot_instance_table_map, physicalDevice)
520 ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
521 return result;
522}
523
524VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
525 bool skip_call = false;
526 {
527 std::lock_guard<std::mutex> lock(global_lock);
528 skip_call |=
529 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
530 }
531 if (skip_call) {
532 return;
533 }
534 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
535}
536
537VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
538 VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
539 bool skip_call = false;
540 {
541 std::lock_guard<std::mutex> lock(global_lock);
542 skip_call |=
543 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
544 }
545 if (skip_call) {
546 return;
547 }
548 get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
549}
550
551VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);
552
553VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);
554
555VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
556 VkExtensionProperties *pProperties);
557
558VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);
559
560VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
561 VkLayerProperties *pProperties);
562
563VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
564 bool skip_call = false;
565 {
566 std::lock_guard<std::mutex> lock(global_lock);
567 skip_call |= ValidateNonDispatchableObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
568 if (pSubmits) {
569 for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
570 if (pSubmits[idx0].pCommandBuffers) {
571 for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
572 skip_call |= ValidateDispatchableObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
573 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
574 }
575 }
576 if (pSubmits[idx0].pSignalSemaphores) {
577 for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
578 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
579 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
580 }
581 }
582 if (pSubmits[idx0].pWaitSemaphores) {
583 for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
584 skip_call |= ValidateNonDispatchableObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
585 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
586 }
587 }
588 }
589 }
590 if (queue) {
591 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
592 }
593 }
594 if (skip_call) {
595 return VK_ERROR_VALIDATION_FAILED_EXT;
596 }
597 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
598 return result;
599}
600
601VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
602 bool skip_call = false;
603 {
604 std::lock_guard<std::mutex> lock(global_lock);
605 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
606 }
607 if (skip_call) {
608 return VK_ERROR_VALIDATION_FAILED_EXT;
609 }
610 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
611 return result;
612}
613
614VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
615 bool skip_call = false;
616 {
617 std::lock_guard<std::mutex> lock(global_lock);
618 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
619 }
620 if (skip_call) {
621 return VK_ERROR_VALIDATION_FAILED_EXT;
622 }
623 VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
624 return result;
625}
626
627VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
628 const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
629 bool skip_call = false;
630 {
631 std::lock_guard<std::mutex> lock(global_lock);
632 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
633 }
634 if (skip_call) {
635 return VK_ERROR_VALIDATION_FAILED_EXT;
636 }
637 VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
638 {
639 std::lock_guard<std::mutex> lock(global_lock);
640 if (result == VK_SUCCESS) {
 641 CreateObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
 642 }
643 }
644 return result;
645}
646
647VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
648 const VkMappedMemoryRange *pMemoryRanges) {
649 bool skip_call = false;
650 {
651 std::lock_guard<std::mutex> lock(global_lock);
652 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
653 if (pMemoryRanges) {
654 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
655 if (pMemoryRanges[idx0].memory) {
656 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
657 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
658 }
659 }
660 }
661 }
662 if (skip_call) {
663 return VK_ERROR_VALIDATION_FAILED_EXT;
664 }
665 VkResult result =
666 get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
667 return result;
668}
669
670VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
671 const VkMappedMemoryRange *pMemoryRanges) {
672 bool skip_call = false;
673 {
674 std::lock_guard<std::mutex> lock(global_lock);
675 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
676 if (pMemoryRanges) {
677 for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
678 if (pMemoryRanges[idx0].memory) {
679 skip_call |= ValidateNonDispatchableObject(device, pMemoryRanges[idx0].memory,
680 VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
681 }
682 }
683 }
684 }
685 if (skip_call) {
686 return VK_ERROR_VALIDATION_FAILED_EXT;
687 }
688 VkResult result =
689 get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
690 return result;
691}
692
693VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
694 VkDeviceSize *pCommittedMemoryInBytes) {
695 bool skip_call = false;
696 {
697 std::lock_guard<std::mutex> lock(global_lock);
698 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
699 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
700 }
701 if (skip_call) {
702 return;
703 }
704 get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
705}
706
707VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
708 VkDeviceSize memoryOffset) {
709 bool skip_call = false;
710 {
711 std::lock_guard<std::mutex> lock(global_lock);
712 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
713 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
714 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
715 }
716 if (skip_call) {
717 return VK_ERROR_VALIDATION_FAILED_EXT;
718 }
719 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
720 return result;
721}
722
723VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
724 bool skip_call = false;
725 {
726 std::lock_guard<std::mutex> lock(global_lock);
727 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
728 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
729 skip_call |= ValidateNonDispatchableObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
730 }
731 if (skip_call) {
732 return VK_ERROR_VALIDATION_FAILED_EXT;
733 }
734 VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
735 return result;
736}
737
738VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
739 VkMemoryRequirements *pMemoryRequirements) {
740 bool skip_call = false;
741 {
742 std::lock_guard<std::mutex> lock(global_lock);
743 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
744 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
745 }
746 if (skip_call) {
747 return;
748 }
749 get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
750}
751
752VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
753 bool skip_call = false;
754 {
755 std::lock_guard<std::mutex> lock(global_lock);
756 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
757 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
758 }
759 if (skip_call) {
760 return;
761 }
762 get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
763}
764
765VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
766 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
767 bool skip_call = false;
768 {
769 std::lock_guard<std::mutex> lock(global_lock);
770 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
771 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
772 }
773 if (skip_call) {
774 return;
775 }
776 get_dispatch_table(ot_device_table_map, device)
777 ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
778}
779
780VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
781 VkImageType type, VkSampleCountFlagBits samples,
782 VkImageUsageFlags usage, VkImageTiling tiling,
783 uint32_t *pPropertyCount,
784 VkSparseImageFormatProperties *pProperties) {
785 bool skip_call = false;
786 {
787 std::lock_guard<std::mutex> lock(global_lock);
788 skip_call |=
789 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
790 }
791 if (skip_call) {
792 return;
793 }
794 get_dispatch_table(ot_instance_table_map, physicalDevice)
795 ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
796 pProperties);
797}
798
799VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
800 const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
801 bool skip_call = false;
802 {
803 std::lock_guard<std::mutex> lock(global_lock);
804 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
805 }
806 if (skip_call) {
807 return VK_ERROR_VALIDATION_FAILED_EXT;
808 }
809 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
810 {
811 std::lock_guard<std::mutex> lock(global_lock);
812 if (result == VK_SUCCESS) {
 813 CreateObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
 814 }
815 }
816 return result;
817}
818
819VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
820 bool skip_call = false;
821 {
822 std::lock_guard<std::mutex> lock(global_lock);
823 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
824 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
825 }
826 if (skip_call) {
827 return;
828 }
829 {
830 std::lock_guard<std::mutex> lock(global_lock);
 831 DestroyObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
 832 }
833 get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
834}
835
836VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
837 bool skip_call = false;
838 {
839 std::lock_guard<std::mutex> lock(global_lock);
840 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
841 if (pFences) {
842 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
843 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
844 }
845 }
846 }
847 if (skip_call) {
848 return VK_ERROR_VALIDATION_FAILED_EXT;
849 }
850 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
851 return result;
852}
853
854VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
855 bool skip_call = false;
856 {
857 std::lock_guard<std::mutex> lock(global_lock);
858 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
859 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
860 }
861 if (skip_call) {
862 return VK_ERROR_VALIDATION_FAILED_EXT;
863 }
864 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
865 return result;
866}
867
868VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
869 uint64_t timeout) {
870 bool skip_call = false;
871 {
872 std::lock_guard<std::mutex> lock(global_lock);
873 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
874 if (pFences) {
875 for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
876 skip_call |= ValidateNonDispatchableObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
877 }
878 }
879 }
880 if (skip_call) {
881 return VK_ERROR_VALIDATION_FAILED_EXT;
882 }
883 VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
884 return result;
885}
886
887VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
888 const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
889 bool skip_call = false;
890 {
891 std::lock_guard<std::mutex> lock(global_lock);
892 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
893 }
894 if (skip_call) {
895 return VK_ERROR_VALIDATION_FAILED_EXT;
896 }
897 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
898 {
899 std::lock_guard<std::mutex> lock(global_lock);
900 if (result == VK_SUCCESS) {
 901 CreateObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
 902 }
903 }
904 return result;
905}
906
907VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
908 bool skip_call = false;
909 {
910 std::lock_guard<std::mutex> lock(global_lock);
911 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
912 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
913 }
914 if (skip_call) {
915 return;
916 }
917 {
918 std::lock_guard<std::mutex> lock(global_lock);
 919 DestroyObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
 920 }
921 get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
922}
923
924VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
925 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
926 bool skip_call = false;
927 {
928 std::lock_guard<std::mutex> lock(global_lock);
929 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
930 }
931 if (skip_call) {
932 return VK_ERROR_VALIDATION_FAILED_EXT;
933 }
934 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
935 {
936 std::lock_guard<std::mutex> lock(global_lock);
937 if (result == VK_SUCCESS) {
 938 CreateObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
 939 }
940 }
941 return result;
942}
943
944VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
945 bool skip_call = false;
946 {
947 std::lock_guard<std::mutex> lock(global_lock);
948 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
949 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
950 }
951 if (skip_call) {
952 return;
953 }
954 {
955 std::lock_guard<std::mutex> lock(global_lock);
 956 DestroyObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
 957 }
958 get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
959}
960
961VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
962 bool skip_call = false;
963 {
964 std::lock_guard<std::mutex> lock(global_lock);
965 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
966 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
967 }
968 if (skip_call) {
969 return VK_ERROR_VALIDATION_FAILED_EXT;
970 }
971 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
972 return result;
973}
974
975VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
976 bool skip_call = false;
977 {
978 std::lock_guard<std::mutex> lock(global_lock);
979 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
980 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
981 }
982 if (skip_call) {
983 return VK_ERROR_VALIDATION_FAILED_EXT;
984 }
985 VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
986 return result;
987}
988
989VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
990 bool skip_call = false;
991 {
992 std::lock_guard<std::mutex> lock(global_lock);
993 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
994 skip_call |= ValidateNonDispatchableObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
995 }
996 if (skip_call) {
997 return VK_ERROR_VALIDATION_FAILED_EXT;
998 }
999 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
1000 return result;
1001}
1002
1003VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1004 const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
1005 bool skip_call = false;
1006 {
1007 std::lock_guard<std::mutex> lock(global_lock);
1008 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1009 }
1010 if (skip_call) {
1011 return VK_ERROR_VALIDATION_FAILED_EXT;
1012 }
1013 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
1014 {
1015 std::lock_guard<std::mutex> lock(global_lock);
1016 if (result == VK_SUCCESS) {
 1017 CreateObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
 1018 }
1019 }
1020 return result;
1021}
1022
1023VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
1024 bool skip_call = false;
1025 {
1026 std::lock_guard<std::mutex> lock(global_lock);
1027 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1028 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1029 }
1030 if (skip_call) {
1031 return;
1032 }
1033 {
1034 std::lock_guard<std::mutex> lock(global_lock);
 1035 DestroyObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
 1036 }
1037 get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
1038}
1039
1040VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
1041 size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1042 bool skip_call = false;
1043 {
1044 std::lock_guard<std::mutex> lock(global_lock);
1045 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1046 skip_call |= ValidateNonDispatchableObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
1047 }
1048 if (skip_call) {
1049 return VK_ERROR_VALIDATION_FAILED_EXT;
1050 }
1051 VkResult result = get_dispatch_table(ot_device_table_map, device)
1052 ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
1053 return result;
1054}
1055
1056VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
1057 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
1058 bool skip_call = false;
1059 {
1060 std::lock_guard<std::mutex> lock(global_lock);
1061 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1062 }
1063 if (skip_call) {
1064 return VK_ERROR_VALIDATION_FAILED_EXT;
1065 }
1066 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1067 {
1068 std::lock_guard<std::mutex> lock(global_lock);
1069 if (result == VK_SUCCESS) {
 1070 CreateObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
 1071 }
1072 }
1073 return result;
1074}
1075
1076VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
1077 bool skip_call = false;
1078 {
1079 std::lock_guard<std::mutex> lock(global_lock);
1080 skip_call |= ValidateNonDispatchableObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1081 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1082 }
1083 if (skip_call) {
1084 return;
1085 }
1086 {
1087 std::lock_guard<std::mutex> lock(global_lock);
 1088 DestroyObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
 1089 }
1090 get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
1091}
1092
1093VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
1094 const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
1095 bool skip_call = false;
1096 {
1097 std::lock_guard<std::mutex> lock(global_lock);
1098 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1099 if (pCreateInfo) {
1100 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1101 }
1102 }
1103 if (skip_call) {
1104 return VK_ERROR_VALIDATION_FAILED_EXT;
1105 }
1106 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
1107 {
1108 std::lock_guard<std::mutex> lock(global_lock);
1109 if (result == VK_SUCCESS) {
 1110 CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
 1111 }
1112 }
1113 return result;
1114}
1115
1116VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
1117 bool skip_call = false;
1118 {
1119 std::lock_guard<std::mutex> lock(global_lock);
1120 skip_call |= ValidateNonDispatchableObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
1121 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1122 }
1123 if (skip_call) {
1124 return;
1125 }
1126 {
1127 std::lock_guard<std::mutex> lock(global_lock);
 1128 DestroyObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
 1129 }
1130 get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
1131}
1132
1133VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1134 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1135 bool skip_call = false;
1136 {
1137 std::lock_guard<std::mutex> lock(global_lock);
1138 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1139 }
1140 if (skip_call) {
1141 return VK_ERROR_VALIDATION_FAILED_EXT;
1142 }
1143 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
1144 {
1145 std::lock_guard<std::mutex> lock(global_lock);
1146 if (result == VK_SUCCESS) {
1147            CreateObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
1148        }
1149 }
1150 return result;
1151}
1152
1153VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1154 bool skip_call = false;
1155 {
1156 std::lock_guard<std::mutex> lock(global_lock);
1157 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1158 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1159 }
1160 if (skip_call) {
1161 return;
1162 }
1163 {
1164 std::lock_guard<std::mutex> lock(global_lock);
1165        DestroyObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
1166    }
1167 get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
1168}
1169
1170VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
1171 VkSubresourceLayout *pLayout) {
1172 bool skip_call = false;
1173 {
1174 std::lock_guard<std::mutex> lock(global_lock);
1175 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1176 skip_call |= ValidateNonDispatchableObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1177 }
1178 if (skip_call) {
1179 return;
1180 }
1181 get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1182}
1183
1184VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1185 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1186 bool skip_call = false;
1187 {
1188 std::lock_guard<std::mutex> lock(global_lock);
1189 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1190 if (pCreateInfo) {
1191 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
1192 }
1193 }
1194 if (skip_call) {
1195 return VK_ERROR_VALIDATION_FAILED_EXT;
1196 }
1197 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
1198 {
1199 std::lock_guard<std::mutex> lock(global_lock);
1200 if (result == VK_SUCCESS) {
1201            CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
1202        }
1203 }
1204 return result;
1205}
1206
1207VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
1208 bool skip_call = false;
1209 {
1210 std::lock_guard<std::mutex> lock(global_lock);
1211 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1212 skip_call |= ValidateNonDispatchableObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1213 }
1214 if (skip_call) {
1215 return;
1216 }
1217 {
1218 std::lock_guard<std::mutex> lock(global_lock);
1219        DestroyObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
1220    }
1221 get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
1222}
1223
1224VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1225 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1226 bool skip_call = false;
1227 {
1228 std::lock_guard<std::mutex> lock(global_lock);
1229 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1230 }
1231 if (skip_call) {
1232 return VK_ERROR_VALIDATION_FAILED_EXT;
1233 }
1234 VkResult result =
1235 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1236 {
1237 std::lock_guard<std::mutex> lock(global_lock);
1238 if (result == VK_SUCCESS) {
1239            CreateObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
1240        }
1241 }
1242 return result;
1243}
1244
1245VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1246 const VkAllocationCallbacks *pAllocator) {
1247 bool skip_call = false;
1248 {
1249 std::lock_guard<std::mutex> lock(global_lock);
1250 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1251 skip_call |= ValidateNonDispatchableObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
1252 }
1253 if (skip_call) {
1254 return;
1255 }
1256 {
1257 std::lock_guard<std::mutex> lock(global_lock);
1258        DestroyObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
1259    }
1260 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1261}
1262
1263VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1264 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1265 bool skip_call = false;
1266 {
1267 std::lock_guard<std::mutex> lock(global_lock);
1268 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1269 }
1270 if (skip_call) {
1271 return VK_ERROR_VALIDATION_FAILED_EXT;
1272 }
1273 VkResult result =
1274 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1275 {
1276 std::lock_guard<std::mutex> lock(global_lock);
1277 if (result == VK_SUCCESS) {
1278            CreateObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
1279        }
1280 }
1281 return result;
1282}
1283
1284VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1285 const VkAllocationCallbacks *pAllocator) {
1286 bool skip_call = false;
1287 {
1288 std::lock_guard<std::mutex> lock(global_lock);
1289 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1290 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1291 }
1292 if (skip_call) {
1293 return;
1294 }
1295 {
1296 std::lock_guard<std::mutex> lock(global_lock);
1297        DestroyObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
1298    }
1299 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1300}
1301
1302VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1303 void *pData) {
1304 bool skip_call = false;
1305 {
1306 std::lock_guard<std::mutex> lock(global_lock);
1307 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1308 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1309 }
1310 if (skip_call) {
1311 return VK_ERROR_VALIDATION_FAILED_EXT;
1312 }
1313 VkResult result =
1314 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1315 return result;
1316}
1317
1318VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1319 const VkPipelineCache *pSrcCaches) {
1320 bool skip_call = false;
1321 {
1322 std::lock_guard<std::mutex> lock(global_lock);
1323 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1324 skip_call |= ValidateNonDispatchableObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1325 if (pSrcCaches) {
1326 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1327 skip_call |=
1328 ValidateNonDispatchableObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
1329 }
1330 }
1331 }
1332 if (skip_call) {
1333 return VK_ERROR_VALIDATION_FAILED_EXT;
1334 }
1335 VkResult result =
1336 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1337 return result;
1338}
1339
1340VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1341 bool skip_call = false;
1342 {
1343 std::lock_guard<std::mutex> lock(global_lock);
1344 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1345 skip_call |= ValidateNonDispatchableObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1346 }
1347 if (skip_call) {
1348 return;
1349 }
1350 {
1351 std::lock_guard<std::mutex> lock(global_lock);
1352        DestroyObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
1353    }
1354 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1355}
1356
1357VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1358 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1359 bool skip_call = false;
1360 {
1361 std::lock_guard<std::mutex> lock(global_lock);
1362 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1363 if (pCreateInfo) {
1364 if (pCreateInfo->pSetLayouts) {
1365 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
1366 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pSetLayouts[idx0],
1367 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1368 }
1369 }
1370 }
1371 }
1372 if (skip_call) {
1373 return VK_ERROR_VALIDATION_FAILED_EXT;
1374 }
1375 VkResult result =
1376 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1377 {
1378 std::lock_guard<std::mutex> lock(global_lock);
1379 if (result == VK_SUCCESS) {
1380            CreateObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
1381        }
1382 }
1383 return result;
1384}
1385
1386VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1387 const VkAllocationCallbacks *pAllocator) {
1388 bool skip_call = false;
1389 {
1390 std::lock_guard<std::mutex> lock(global_lock);
1391 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1392 skip_call |= ValidateNonDispatchableObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1393 }
1394 if (skip_call) {
1395 return;
1396 }
1397 {
1398 std::lock_guard<std::mutex> lock(global_lock);
1399        DestroyObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
1400    }
1401 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1402}
1403
1404VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1405 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1406 bool skip_call = false;
1407 {
1408 std::lock_guard<std::mutex> lock(global_lock);
1409 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1410 }
1411 if (skip_call) {
1412 return VK_ERROR_VALIDATION_FAILED_EXT;
1413 }
1414 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1415 {
1416 std::lock_guard<std::mutex> lock(global_lock);
1417 if (result == VK_SUCCESS) {
1418            CreateObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
1419        }
1420 }
1421 return result;
1422}
1423
1424VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1425 bool skip_call = false;
1426 {
1427 std::lock_guard<std::mutex> lock(global_lock);
1428 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1429 skip_call |= ValidateNonDispatchableObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1430 }
1431 if (skip_call) {
1432 return;
1433 }
1434 {
1435 std::lock_guard<std::mutex> lock(global_lock);
1436        DestroyObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
1437    }
1438 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1439}
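// CreateDescriptorSetLayout additionally walks pCreateInfo->pBindings and validates every immutable
// sampler handle supplied for each binding before the call is forwarded.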
1440
1441VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1442 const VkAllocationCallbacks *pAllocator,
1443 VkDescriptorSetLayout *pSetLayout) {
1444 bool skip_call = false;
1445 {
1446 std::lock_guard<std::mutex> lock(global_lock);
1447 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1448 if (pCreateInfo) {
1449 if (pCreateInfo->pBindings) {
1450 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1451 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1452 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1453 skip_call |=
1454 ValidateNonDispatchableObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
1455 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1456 }
1457 }
1458 }
1459 }
1460 }
1461 }
1462 if (skip_call) {
1463 return VK_ERROR_VALIDATION_FAILED_EXT;
1464 }
1465 VkResult result =
1466 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1467 {
1468 std::lock_guard<std::mutex> lock(global_lock);
1469 if (result == VK_SUCCESS) {
1470            CreateObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
1471        }
1472 }
1473 return result;
1474}
1475
1476VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1477 const VkAllocationCallbacks *pAllocator) {
1478 bool skip_call = false;
1479 {
1480 std::lock_guard<std::mutex> lock(global_lock);
1481 skip_call |= ValidateNonDispatchableObject(device, descriptorSetLayout,
1482 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1483 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1484 }
1485 if (skip_call) {
1486 return;
1487 }
1488 {
1489 std::lock_guard<std::mutex> lock(global_lock);
1490        DestroyObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
1491    }
1492 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1493}
1494
1495VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1496 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1497 bool skip_call = false;
1498 {
1499 std::lock_guard<std::mutex> lock(global_lock);
1500 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1501 }
1502 if (skip_call) {
1503 return VK_ERROR_VALIDATION_FAILED_EXT;
1504 }
1505 VkResult result =
1506 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1507 {
1508 std::lock_guard<std::mutex> lock(global_lock);
1509 if (result == VK_SUCCESS) {
1510            CreateObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
1511        }
1512 }
1513 return result;
1514}
1515
1516VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1517 VkDescriptorPoolResetFlags flags) {
1518 bool skip_call = false;
1519 {
1520 std::lock_guard<std::mutex> lock(global_lock);
1521 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1522 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1523 }
1524 if (skip_call) {
1525 return VK_ERROR_VALIDATION_FAILED_EXT;
1526 }
1527 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1528 return result;
1529}
1530
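// UpdateDescriptorSets validates the destination/source descriptor sets plus the type-specific payload of
// each write: pBufferInfo[].buffer for the buffer descriptor types, pImageInfo[].imageView and .sampler for
// the image/sampler descriptor types, and pTexelBufferView[] for the texel-buffer descriptor types (the
// trailing 'true' on that last check appears to mark the buffer view handle as optional).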
1531VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1532 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
1533 const VkCopyDescriptorSet *pDescriptorCopies) {
1534 bool skip_call = false;
1535 {
1536 std::lock_guard<std::mutex> lock(global_lock);
1537 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1538 if (pDescriptorCopies) {
1539 for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
1540 if (pDescriptorCopies[idx0].dstSet) {
1541 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].dstSet,
1542 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1543 }
1544 if (pDescriptorCopies[idx0].srcSet) {
1545 skip_call |= ValidateNonDispatchableObject(device, pDescriptorCopies[idx0].srcSet,
1546 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1547 }
1548 }
1549 }
1550 if (pDescriptorWrites) {
1551 for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
1552 if (pDescriptorWrites[idx1].dstSet) {
1553 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].dstSet,
1554 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1555 }
1556 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1557 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
1558 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
1559 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
1560 for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
1561 if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
1562 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
1563 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1564 }
1565 }
1566 }
1567 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
1568 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
1569 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
1570 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
1571 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
1572 for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
1573 if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
1574 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
1575 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1576 }
1577 if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
1578 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
1579 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1580 }
1581 }
1582 }
1583 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1584 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
1585 for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
1586 skip_call |= ValidateNonDispatchableObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
1587 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
1588 }
1589 }
1590 }
1591 }
1592 }
1593 if (skip_call) {
1594 return;
1595 }
1596 get_dispatch_table(ot_device_table_map, device)
1597 ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
1598}
1599
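// CreateFramebuffer also validates each attachment image view and the render pass named in pCreateInfo
// before forwarding the call.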
1600VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1601 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1602 bool skip_call = false;
1603 {
1604 std::lock_guard<std::mutex> lock(global_lock);
1605 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1606 if (pCreateInfo) {
1607 if (pCreateInfo->pAttachments) {
1608 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
1609 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->pAttachments[idx0],
1610 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1611 }
1612 }
1613 if (pCreateInfo->renderPass) {
1614 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->renderPass,
1615 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1616 }
1617 }
1618 }
1619 if (skip_call) {
1620 return VK_ERROR_VALIDATION_FAILED_EXT;
1621 }
1622 VkResult result =
1623 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1624 {
1625 std::lock_guard<std::mutex> lock(global_lock);
1626 if (result == VK_SUCCESS) {
1627            CreateObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
1628        }
1629 }
1630 return result;
1631}
1632
1633VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1634 bool skip_call = false;
1635 {
1636 std::lock_guard<std::mutex> lock(global_lock);
1637 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1638 skip_call |= ValidateNonDispatchableObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
1639 }
1640 if (skip_call) {
1641 return;
1642 }
1643 {
1644 std::lock_guard<std::mutex> lock(global_lock);
1645        DestroyObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
1646    }
1647 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1648}
1649
1650VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1651 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1652 bool skip_call = false;
1653 {
1654 std::lock_guard<std::mutex> lock(global_lock);
1655 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1656 }
1657 if (skip_call) {
1658 return VK_ERROR_VALIDATION_FAILED_EXT;
1659 }
1660 VkResult result =
1661 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1662 {
1663 std::lock_guard<std::mutex> lock(global_lock);
1664 if (result == VK_SUCCESS) {
1665            CreateObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
1666        }
1667 }
1668 return result;
1669}
1670
1671VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1672 bool skip_call = false;
1673 {
1674 std::lock_guard<std::mutex> lock(global_lock);
1675 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1676 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1677 }
1678 if (skip_call) {
1679 return;
1680 }
1681 {
1682 std::lock_guard<std::mutex> lock(global_lock);
1683        DestroyObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
1684    }
1685 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1686}
1687
1688VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1689 bool skip_call = false;
1690 {
1691 std::lock_guard<std::mutex> lock(global_lock);
1692 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1693 skip_call |= ValidateNonDispatchableObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1694 }
1695 if (skip_call) {
1696 return;
1697 }
1698 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1699}
1700
1701VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1702 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1703 bool skip_call = false;
1704 {
1705 std::lock_guard<std::mutex> lock(global_lock);
1706 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1707 }
1708 if (skip_call) {
1709 return VK_ERROR_VALIDATION_FAILED_EXT;
1710 }
1711 VkResult result =
1712 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1713 {
1714 std::lock_guard<std::mutex> lock(global_lock);
1715 if (result == VK_SUCCESS) {
1716            CreateObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
1717        }
1718 }
1719 return result;
1720}
1721
1722VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1723 bool skip_call = false;
1724 {
1725 std::lock_guard<std::mutex> lock(global_lock);
1726 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1727 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1728 }
1729 if (skip_call) {
1730 return VK_ERROR_VALIDATION_FAILED_EXT;
1731 }
1732 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1733 return result;
1734}
1735
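// BeginCommandBuffer looks up the command buffer's OBJTRACK_NODE; for buffers recorded as secondary
// (OBJSTATUS_COMMAND_BUFFER_SECONDARY) it also validates the inheritance info's framebuffer and renderPass,
// passing 'true' as the final argument, which appears to allow those handles to be VK_NULL_HANDLE.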
1736VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1737 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1738 bool skip_call = false;
1739 {
1740 std::lock_guard<std::mutex> lock(global_lock);
1741 skip_call |=
1742 ValidateDispatchableObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1743 if (begin_info) {
1744 OBJTRACK_NODE *pNode =
1745 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<const uint64_t>(command_buffer)];
1746 if ((begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
1747 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
1748 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
1749 skip_call |= ValidateNonDispatchableObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
1750 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1751 }
1752 }
1753 }
1754 if (skip_call) {
1755 return VK_ERROR_VALIDATION_FAILED_EXT;
1756 }
1757 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1758 return result;
1759}
1760
1761VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1762 bool skip_call = false;
1763 {
1764 std::lock_guard<std::mutex> lock(global_lock);
1765 skip_call |=
1766 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1767 }
1768 if (skip_call) {
1769 return VK_ERROR_VALIDATION_FAILED_EXT;
1770 }
1771 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1772 return result;
1773}
1774
1775VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1776 bool skip_call = false;
1777 {
1778 std::lock_guard<std::mutex> lock(global_lock);
1779 skip_call |=
1780 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1781 }
1782 if (skip_call) {
1783 return VK_ERROR_VALIDATION_FAILED_EXT;
1784 }
1785 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1786 return result;
1787}
1788
1789VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1790 VkPipeline pipeline) {
1791 bool skip_call = false;
1792 {
1793 std::lock_guard<std::mutex> lock(global_lock);
1794 skip_call |=
1795 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1796 skip_call |= ValidateNonDispatchableObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
1797 }
1798 if (skip_call) {
1799 return;
1800 }
1801 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1802}
1803
1804VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1805 const VkViewport *pViewports) {
1806 bool skip_call = false;
1807 {
1808 std::lock_guard<std::mutex> lock(global_lock);
1809 skip_call |=
1810 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1811 }
1812 if (skip_call) {
1813 return;
1814 }
1815 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1816}
1817
1818VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1819 const VkRect2D *pScissors) {
1820 bool skip_call = false;
1821 {
1822 std::lock_guard<std::mutex> lock(global_lock);
1823 skip_call |=
1824 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1825 }
1826 if (skip_call) {
1827 return;
1828 }
1829 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1830}
1831
1832VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1833 bool skip_call = false;
1834 {
1835 std::lock_guard<std::mutex> lock(global_lock);
1836 skip_call |=
1837 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1838 }
1839 if (skip_call) {
1840 return;
1841 }
1842 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1843}
1844
1845VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1846 float depthBiasSlopeFactor) {
1847 bool skip_call = false;
1848 {
1849 std::lock_guard<std::mutex> lock(global_lock);
1850 skip_call |=
1851 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1852 }
1853 if (skip_call) {
1854 return;
1855 }
1856 get_dispatch_table(ot_device_table_map, commandBuffer)
1857 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1858}
1859
1860VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1861 bool skip_call = false;
1862 {
1863 std::lock_guard<std::mutex> lock(global_lock);
1864 skip_call |=
1865 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1866 }
1867 if (skip_call) {
1868 return;
1869 }
1870 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1871}
1872
1873VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1874 bool skip_call = false;
1875 {
1876 std::lock_guard<std::mutex> lock(global_lock);
1877 skip_call |=
1878 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1879 }
1880 if (skip_call) {
1881 return;
1882 }
1883 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1884}
1885
1886VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1887 uint32_t compareMask) {
1888 bool skip_call = false;
1889 {
1890 std::lock_guard<std::mutex> lock(global_lock);
1891 skip_call |=
1892 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1893 }
1894 if (skip_call) {
1895 return;
1896 }
1897 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1898}
1899
1900VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1901 bool skip_call = false;
1902 {
1903 std::lock_guard<std::mutex> lock(global_lock);
1904 skip_call |=
1905 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1906 }
1907 if (skip_call) {
1908 return;
1909 }
1910 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1911}
1912
1913VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1914 bool skip_call = false;
1915 {
1916 std::lock_guard<std::mutex> lock(global_lock);
1917 skip_call |=
1918 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1919 }
1920 if (skip_call) {
1921 return;
1922 }
1923 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1924}
1925
1926VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1927 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1928 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1929 const uint32_t *pDynamicOffsets) {
1930 bool skip_call = false;
1931 {
1932 std::lock_guard<std::mutex> lock(global_lock);
1933 skip_call |=
1934 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1935 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
1936 if (pDescriptorSets) {
1937 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
1938 skip_call |= ValidateNonDispatchableObject(commandBuffer, pDescriptorSets[idx0],
1939 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1940 }
1941 }
1942 }
1943 if (skip_call) {
1944 return;
1945 }
1946 get_dispatch_table(ot_device_table_map, commandBuffer)
1947 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
1948 dynamicOffsetCount, pDynamicOffsets);
1949}
1950
1951VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1952 VkIndexType indexType) {
1953 bool skip_call = false;
1954 {
1955 std::lock_guard<std::mutex> lock(global_lock);
1956 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1957 skip_call |=
1958 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1959 }
1960 if (skip_call) {
1961 return;
1962 }
1963 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
1964}
1965
1966VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
1967 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
1968 bool skip_call = false;
1969 {
1970 std::lock_guard<std::mutex> lock(global_lock);
1971 skip_call |=
1972 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1973 if (pBuffers) {
1974 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
1975 skip_call |=
1976 ValidateNonDispatchableObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1977 }
1978 }
1979 }
1980 if (skip_call) {
1981 return;
1982 }
1983 get_dispatch_table(ot_device_table_map, commandBuffer)
1984 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
1985}
1986
1987VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
1988 uint32_t firstVertex, uint32_t firstInstance) {
1989 bool skip_call = false;
1990 {
1991 std::lock_guard<std::mutex> lock(global_lock);
1992 skip_call |=
1993 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1994 }
1995 if (skip_call) {
1996 return;
1997 }
1998 get_dispatch_table(ot_device_table_map, commandBuffer)
1999 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2000}
2001
2002VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2003 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2004 bool skip_call = false;
2005 {
2006 std::lock_guard<std::mutex> lock(global_lock);
2007 skip_call |=
2008 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2009 }
2010 if (skip_call) {
2011 return;
2012 }
2013 get_dispatch_table(ot_device_table_map, commandBuffer)
2014 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2015}
2016
2017VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2018 uint32_t stride) {
2019 bool skip_call = false;
2020 {
2021 std::lock_guard<std::mutex> lock(global_lock);
2022 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2023 skip_call |=
2024 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2025 }
2026 if (skip_call) {
2027 return;
2028 }
2029 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2030}
2031
2032VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2033 uint32_t drawCount, uint32_t stride) {
2034 bool skip_call = false;
2035 {
2036 std::lock_guard<std::mutex> lock(global_lock);
2037 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2038 skip_call |=
2039 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2040 }
2041 if (skip_call) {
2042 return;
2043 }
2044 get_dispatch_table(ot_device_table_map, commandBuffer)
2045 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2046}
2047
2048VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2049 bool skip_call = false;
2050 {
2051 std::lock_guard<std::mutex> lock(global_lock);
2052 skip_call |=
2053 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2054 }
2055 if (skip_call) {
2056 return;
2057 }
2058 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2059}
2060
2061VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2062 bool skip_call = false;
2063 {
2064 std::lock_guard<std::mutex> lock(global_lock);
2065 skip_call |= ValidateNonDispatchableObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2066 skip_call |=
2067 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2068 }
2069 if (skip_call) {
2070 return;
2071 }
2072 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2073}
2074
2075VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2076 uint32_t regionCount, const VkBufferCopy *pRegions) {
2077 bool skip_call = false;
2078 {
2079 std::lock_guard<std::mutex> lock(global_lock);
2080 skip_call |=
2081 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2082 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2083 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2084 }
2085 if (skip_call) {
2086 return;
2087 }
2088 get_dispatch_table(ot_device_table_map, commandBuffer)
2089 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2090}
2091
2092VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2093 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2094 const VkImageCopy *pRegions) {
2095 bool skip_call = false;
2096 {
2097 std::lock_guard<std::mutex> lock(global_lock);
2098 skip_call |=
2099 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2100 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2101 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2102 }
2103 if (skip_call) {
2104 return;
2105 }
2106 get_dispatch_table(ot_device_table_map, commandBuffer)
2107 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2108}
2109
2110VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2111 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2112 const VkImageBlit *pRegions, VkFilter filter) {
2113 bool skip_call = false;
2114 {
2115 std::lock_guard<std::mutex> lock(global_lock);
2116 skip_call |=
2117 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2118 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2119 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2120 }
2121 if (skip_call) {
2122 return;
2123 }
2124 get_dispatch_table(ot_device_table_map, commandBuffer)
2125 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2126}
2127
2128VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2129 VkImageLayout dstImageLayout, uint32_t regionCount,
2130 const VkBufferImageCopy *pRegions) {
2131 bool skip_call = false;
2132 {
2133 std::lock_guard<std::mutex> lock(global_lock);
2134 skip_call |=
2135 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2136 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2137 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2138 }
2139 if (skip_call) {
2140 return;
2141 }
2142 get_dispatch_table(ot_device_table_map, commandBuffer)
2143 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2144}
2145
2146VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2147 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2148 bool skip_call = false;
2149 {
2150 std::lock_guard<std::mutex> lock(global_lock);
2151 skip_call |=
2152 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2153 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2154 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2155 }
2156 if (skip_call) {
2157 return;
2158 }
2159 get_dispatch_table(ot_device_table_map, commandBuffer)
2160 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2161}
2162
2163VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2164 VkDeviceSize dataSize, const uint32_t *pData) {
2165 bool skip_call = false;
2166 {
2167 std::lock_guard<std::mutex> lock(global_lock);
2168 skip_call |=
2169 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2170 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2171 }
2172 if (skip_call) {
2173 return;
2174 }
2175 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2176}
2177
2178VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2179 VkDeviceSize size, uint32_t data) {
2180 bool skip_call = false;
2181 {
2182 std::lock_guard<std::mutex> lock(global_lock);
2183 skip_call |=
2184 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2185 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2186 }
2187 if (skip_call) {
2188 return;
2189 }
2190 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2191}
2192
2193VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2194 const VkClearColorValue *pColor, uint32_t rangeCount,
2195 const VkImageSubresourceRange *pRanges) {
2196 bool skip_call = false;
2197 {
2198 std::lock_guard<std::mutex> lock(global_lock);
2199 skip_call |=
2200 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2201 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2202 }
2203 if (skip_call) {
2204 return;
2205 }
2206 get_dispatch_table(ot_device_table_map, commandBuffer)
2207 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2208}
2209
2210VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2211 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2212 const VkImageSubresourceRange *pRanges) {
2213 bool skip_call = false;
2214 {
2215 std::lock_guard<std::mutex> lock(global_lock);
2216 skip_call |=
2217 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2218 skip_call |= ValidateNonDispatchableObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2219 }
2220 if (skip_call) {
2221 return;
2222 }
2223 get_dispatch_table(ot_device_table_map, commandBuffer)
2224 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2225}
2226
2227VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2228 const VkClearAttachment *pAttachments, uint32_t rectCount,
2229 const VkClearRect *pRects) {
2230 bool skip_call = false;
2231 {
2232 std::lock_guard<std::mutex> lock(global_lock);
2233 skip_call |=
2234 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2235 }
2236 if (skip_call) {
2237 return;
2238 }
2239 get_dispatch_table(ot_device_table_map, commandBuffer)
2240 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2241}
2242
2243VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2244 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2245 const VkImageResolve *pRegions) {
2246 bool skip_call = false;
2247 {
2248 std::lock_guard<std::mutex> lock(global_lock);
2249 skip_call |=
2250 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2251 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2252 skip_call |= ValidateNonDispatchableObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2253 }
2254 if (skip_call) {
2255 return;
2256 }
2257 get_dispatch_table(ot_device_table_map, commandBuffer)
2258 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2259}
2260
2261VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2262 bool skip_call = false;
2263 {
2264 std::lock_guard<std::mutex> lock(global_lock);
2265 skip_call |=
2266 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2267 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2268 }
2269 if (skip_call) {
2270 return;
2271 }
2272 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2273}
2274
2275VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2276 bool skip_call = false;
2277 {
2278 std::lock_guard<std::mutex> lock(global_lock);
2279 skip_call |=
2280 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2281 skip_call |= ValidateNonDispatchableObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2282 }
2283 if (skip_call) {
2284 return;
2285 }
2286 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2287}
2288
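// CmdWaitEvents and CmdPipelineBarrier below also validate every event in pEvents (CmdWaitEvents only),
// every buffer referenced by pBufferMemoryBarriers, and every image referenced by pImageMemoryBarriers.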
2289VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2290 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2291 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2292 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2293 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2294 bool skip_call = false;
2295 {
2296 std::lock_guard<std::mutex> lock(global_lock);
2297 skip_call |=
2298 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2299 if (pBufferMemoryBarriers) {
2300 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2301 if (pBufferMemoryBarriers[idx0].buffer) {
2302 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2303 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2304 }
2305 }
2306 }
2307 if (pEvents) {
2308 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2309 skip_call |=
2310 ValidateNonDispatchableObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2311 }
2312 }
2313 if (pImageMemoryBarriers) {
2314 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2315 if (pImageMemoryBarriers[idx2].image) {
2316 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2317 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2318 }
2319 }
2320 }
2321 }
2322 if (skip_call) {
2323 return;
2324 }
2325 get_dispatch_table(ot_device_table_map, commandBuffer)
2326 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2327 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2328}
2329
2330VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2331 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2332 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2333 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2334 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2335 bool skip_call = false;
2336 {
2337 std::lock_guard<std::mutex> lock(global_lock);
2338 skip_call |=
2339 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2340 if (pBufferMemoryBarriers) {
2341 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2342 if (pBufferMemoryBarriers[idx0].buffer) {
2343 skip_call |= ValidateNonDispatchableObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2344 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2345 }
2346 }
2347 }
2348 if (pImageMemoryBarriers) {
2349 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2350 if (pImageMemoryBarriers[idx1].image) {
2351 skip_call |= ValidateNonDispatchableObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2352 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2353 }
2354 }
2355 }
2356 }
2357 if (skip_call) {
2358 return;
2359 }
2360 get_dispatch_table(ot_device_table_map, commandBuffer)
2361 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2362 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2363}
2364
2365VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2366 VkQueryControlFlags flags) {
2367 bool skip_call = false;
2368 {
2369 std::lock_guard<std::mutex> lock(global_lock);
2370 skip_call |=
2371 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2372 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2373 }
2374 if (skip_call) {
2375 return;
2376 }
2377 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2378}
2379
2380VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2381 bool skip_call = false;
2382 {
2383 std::lock_guard<std::mutex> lock(global_lock);
2384 skip_call |=
2385 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2386 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2387 }
2388 if (skip_call) {
2389 return;
2390 }
2391 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2392}
2393
2394VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2395 uint32_t queryCount) {
2396 bool skip_call = false;
2397 {
2398 std::lock_guard<std::mutex> lock(global_lock);
2399 skip_call |=
2400 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2401 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2402 }
2403 if (skip_call) {
2404 return;
2405 }
2406 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2407}
2408
2409VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2410 VkQueryPool queryPool, uint32_t query) {
2411 bool skip_call = false;
2412 {
2413 std::lock_guard<std::mutex> lock(global_lock);
2414 skip_call |=
2415 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2416 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2417 }
2418 if (skip_call) {
2419 return;
2420 }
2421 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2422}
2423
2424VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2425 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2426 VkDeviceSize stride, VkQueryResultFlags flags) {
2427 bool skip_call = false;
2428 {
2429 std::lock_guard<std::mutex> lock(global_lock);
2430 skip_call |=
2431 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2432 skip_call |= ValidateNonDispatchableObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2433 skip_call |= ValidateNonDispatchableObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
2434 }
2435 if (skip_call) {
2436 return;
2437 }
2438 get_dispatch_table(ot_device_table_map, commandBuffer)
2439 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2440}
2441
2442VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2443 uint32_t offset, uint32_t size, const void *pValues) {
2444 bool skip_call = false;
2445 {
2446 std::lock_guard<std::mutex> lock(global_lock);
2447 skip_call |=
2448 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2449 skip_call |= ValidateNonDispatchableObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
2450 }
2451 if (skip_call) {
2452 return;
2453 }
2454 get_dispatch_table(ot_device_table_map, commandBuffer)
2455 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2456}
2457
2458VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2459 VkSubpassContents contents) {
2460 bool skip_call = false;
2461 {
2462 std::lock_guard<std::mutex> lock(global_lock);
2463 skip_call |=
2464 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2465 if (pRenderPassBegin) {
2466 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->framebuffer,
2467 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2468 skip_call |= ValidateNonDispatchableObject(commandBuffer, pRenderPassBegin->renderPass,
2469 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2470 }
2471 }
2472 if (skip_call) {
2473 return;
2474 }
2475 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2476}
2477
2478VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2479 bool skip_call = false;
2480 {
2481 std::lock_guard<std::mutex> lock(global_lock);
2482 skip_call |=
2483 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2484 }
2485 if (skip_call) {
2486 return;
2487 }
2488 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2489}
2490
2491VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2492 bool skip_call = false;
2493 {
2494 std::lock_guard<std::mutex> lock(global_lock);
2495 skip_call |=
2496 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2497 }
2498 if (skip_call) {
2499 return;
2500 }
2501 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2502}
2503
2504VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2505 const VkCommandBuffer *pCommandBuffers) {
2506 bool skip_call = false;
2507 {
2508 std::lock_guard<std::mutex> lock(global_lock);
2509 skip_call |=
2510 ValidateDispatchableObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2511 if (pCommandBuffers) {
2512 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2513 skip_call |= ValidateDispatchableObject(commandBuffer, pCommandBuffers[idx0],
2514 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2515 }
2516 }
2517 }
2518 if (skip_call) {
2519 return;
2520 }
2521 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2522}
2523
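// Surfaces are instance-level objects: validate the instance and surface handles, remove the surface
// from the tracker, then pass the destroy call down the instance dispatch table.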
2524VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2525 bool skip_call = false;
2526 {
2527 std::lock_guard<std::mutex> lock(global_lock);
2528 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2529 skip_call |= ValidateNonDispatchableObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2530 }
2531 if (skip_call) {
2532 return;
2533 }
2534 {
2535 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13002536 DestroyObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002537 }
2538 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2539}
2540
2541VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2542 VkSurfaceKHR surface, VkBool32 *pSupported) {
2543 bool skip_call = false;
2544 {
2545 std::lock_guard<std::mutex> lock(global_lock);
2546 skip_call |=
2547 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2548 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2549 }
2550 if (skip_call) {
2551 return VK_ERROR_VALIDATION_FAILED_EXT;
2552 }
2553 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2554 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2555 return result;
2556}
2557
2558VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2559 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2560 bool skip_call = false;
2561 {
2562 std::lock_guard<std::mutex> lock(global_lock);
2563 skip_call |=
2564 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2565 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2566 }
2567 if (skip_call) {
2568 return VK_ERROR_VALIDATION_FAILED_EXT;
2569 }
2570 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2571 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2572 return result;
2573}
2574
2575VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2576 uint32_t *pSurfaceFormatCount,
2577 VkSurfaceFormatKHR *pSurfaceFormats) {
2578 bool skip_call = false;
2579 {
2580 std::lock_guard<std::mutex> lock(global_lock);
2581 skip_call |=
2582 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2583 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2584 }
2585 if (skip_call) {
2586 return VK_ERROR_VALIDATION_FAILED_EXT;
2587 }
2588 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2589 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2590 return result;
2591}
2592
2593VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2594 uint32_t *pPresentModeCount,
2595 VkPresentModeKHR *pPresentModes) {
2596 bool skip_call = false;
2597 {
2598 std::lock_guard<std::mutex> lock(global_lock);
2599 skip_call |=
2600 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2601 skip_call |= ValidateNonDispatchableObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2602 }
2603 if (skip_call) {
2604 return VK_ERROR_VALIDATION_FAILED_EXT;
2605 }
2606 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2607 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2608 return result;
2609}
2610
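// pCreateInfo->oldSwapchain is optional, so it is validated as allowed-to-be-null; the surface is
// validated against the device's physical device, since surfaces belong to the instance side.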
2611VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2612 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2613 bool skip_call = false;
2614 {
2615 std::lock_guard<std::mutex> lock(global_lock);
2616 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2617 if (pCreateInfo) {
2618 skip_call |= ValidateNonDispatchableObject(device, pCreateInfo->oldSwapchain,
2619 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2620 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2621 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfo->surface,
2622 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2623 }
2624 }
2625 if (skip_call) {
2626 return VK_ERROR_VALIDATION_FAILED_EXT;
2627 }
2628 VkResult result =
2629 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2630 {
2631 std::lock_guard<std::mutex> lock(global_lock);
2632 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002633 CreateObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002634 }
2635 }
2636 return result;
2637}
2638
2639VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2640 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2641 bool skip_call = false;
2642 {
2643 std::lock_guard<std::mutex> lock(global_lock);
2644 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2645 skip_call |= ValidateNonDispatchableObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2646 skip_call |= ValidateNonDispatchableObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2647 skip_call |= ValidateNonDispatchableObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2648 }
2649 if (skip_call) {
2650 return VK_ERROR_VALIDATION_FAILED_EXT;
2651 }
2652 VkResult result = get_dispatch_table(ot_device_table_map, device)
2653 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2654 return result;
2655}
2656
2657VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2658 bool skip_call = false;
2659 {
2660 std::lock_guard<std::mutex> lock(global_lock);
2661 if (pPresentInfo) {
2662 if (pPresentInfo->pSwapchains) {
2663 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2664 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pSwapchains[idx0],
2665 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2666 }
2667 }
2668 if (pPresentInfo->pWaitSemaphores) {
2669 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2670 skip_call |= ValidateNonDispatchableObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2671 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2672 }
2673 }
2674 }
2675 skip_call |= ValidateDispatchableObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2676 }
2677 if (skip_call) {
2678 return VK_ERROR_VALIDATION_FAILED_EXT;
2679 }
2680 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2681 return result;
2682}
2683
2684#ifdef VK_USE_PLATFORM_WIN32_KHR
2685VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2686 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2687 bool skip_call = false;
2688 {
2689 std::lock_guard<std::mutex> lock(global_lock);
2690 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2691 }
2692 if (skip_call) {
2693 return VK_ERROR_VALIDATION_FAILED_EXT;
2694 }
2695 VkResult result =
2696 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2697 {
2698 std::lock_guard<std::mutex> lock(global_lock);
2699 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002700 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002701 }
2702 }
2703 return result;
2704}
2705
2706VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2707 uint32_t queueFamilyIndex) {
2708 bool skip_call = false;
2709 {
2710 std::lock_guard<std::mutex> lock(global_lock);
2711 skip_call |=
2712 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2713 }
2714 if (skip_call) {
2715 return VK_FALSE;
2716 }
2717 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2718 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2719 return result;
2720}
2721#endif // VK_USE_PLATFORM_WIN32_KHR
2722
2723#ifdef VK_USE_PLATFORM_XCB_KHR
2724VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2725 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2726 bool skip_call = false;
2727 {
2728 std::lock_guard<std::mutex> lock(global_lock);
2729 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2730 }
2731 if (skip_call) {
2732 return VK_ERROR_VALIDATION_FAILED_EXT;
2733 }
2734 VkResult result =
2735 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2736 {
2737 std::lock_guard<std::mutex> lock(global_lock);
2738 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002739 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002740 }
2741 }
2742 return result;
2743}
2744
2745VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2746 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2747 xcb_visualid_t visual_id) {
2748 bool skip_call = false;
2749 {
2750 std::lock_guard<std::mutex> lock(global_lock);
2751 skip_call |=
2752 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2753 }
2754 if (skip_call) {
2755 return VK_FALSE;
2756 }
2757 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2758 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2759 return result;
2760}
2761#endif // VK_USE_PLATFORM_XCB_KHR
2762
2763#ifdef VK_USE_PLATFORM_XLIB_KHR
2764VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2765 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2766 bool skip_call = false;
2767 {
2768 std::lock_guard<std::mutex> lock(global_lock);
2769 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2770 }
2771 if (skip_call) {
2772 return VK_ERROR_VALIDATION_FAILED_EXT;
2773 }
2774 VkResult result =
2775 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2776 {
2777 std::lock_guard<std::mutex> lock(global_lock);
2778 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002779 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002780 }
2781 }
2782 return result;
2783}
2784
2785VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2786 uint32_t queueFamilyIndex, Display *dpy,
2787 VisualID visualID) {
2788 bool skip_call = false;
2789 {
2790 std::lock_guard<std::mutex> lock(global_lock);
2791 skip_call |=
2792 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2793 }
2794 if (skip_call) {
2795 return VK_FALSE;
2796 }
2797 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2798 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2799 return result;
2800}
2801#endif // VK_USE_PLATFORM_XLIB_KHR
2802
2803#ifdef VK_USE_PLATFORM_MIR_KHR
2804VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2805 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2806 bool skip_call = false;
2807 {
2808 std::lock_guard<std::mutex> lock(global_lock);
2809 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2810 }
2811 if (skip_call) {
2812 return VK_ERROR_VALIDATION_FAILED_EXT;
2813 }
2814 VkResult result =
2815 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2816 {
2817 std::lock_guard<std::mutex> lock(global_lock);
2818 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002819 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002820 }
2821 }
2822 return result;
2823}
2824
2825VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2826 uint32_t queueFamilyIndex, MirConnection *connection) {
2827 bool skip_call = false;
2828 {
2829 std::lock_guard<std::mutex> lock(global_lock);
2830 skip_call |=
2831 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2832 }
2833 if (skip_call) {
2834 return VK_FALSE;
2835 }
2836 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2837 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2838 return result;
2839}
2840#endif // VK_USE_PLATFORM_MIR_KHR
2841
2842#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2843VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2844 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2845 bool skip_call = false;
2846 {
2847 std::lock_guard<std::mutex> lock(global_lock);
2848 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2849 }
2850 if (skip_call) {
2851 return VK_ERROR_VALIDATION_FAILED_EXT;
2852 }
2853 VkResult result =
2854 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2855 {
2856 std::lock_guard<std::mutex> lock(global_lock);
2857 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002858 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002859 }
2860 }
2861 return result;
2862}
2863
2864VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2865 uint32_t queueFamilyIndex,
2866 struct wl_display *display) {
2867 bool skip_call = false;
2868 {
2869 std::lock_guard<std::mutex> lock(global_lock);
2870 skip_call |=
2871 ValidateDispatchableObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2872 }
2873 if (skip_call) {
2874 return VK_FALSE;
2875 }
2876 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2877 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2878 return result;
2879}
2880#endif // VK_USE_PLATFORM_WAYLAND_KHR
2881
2882#ifdef VK_USE_PLATFORM_ANDROID_KHR
2883VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2884 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2885 bool skip_call = false;
2886 {
2887 std::lock_guard<std::mutex> lock(global_lock);
2888 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2889 }
2890 if (skip_call) {
2891 return VK_ERROR_VALIDATION_FAILED_EXT;
2892 }
2893 VkResult result =
2894 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2895 {
2896 std::lock_guard<std::mutex> lock(global_lock);
2897 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002898 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002899 }
2900 }
2901 return result;
2902}
2903#endif // VK_USE_PLATFORM_ANDROID_KHR
2904
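// Same checks as CreateSwapchainKHR, applied to every element of pCreateInfos; on success each
// swapchain handle written to pSwapchains is registered with the tracker.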
Mark Youngead9b932016-09-08 12:28:38 -06002905VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
2906 const VkSwapchainCreateInfoKHR *pCreateInfos,
2907 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
2908 bool skip_call = false;
2909 uint32_t i = 0;
2910 {
2911 std::lock_guard<std::mutex> lock(global_lock);
2912 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2913 if (NULL != pCreateInfos) {
2914 for (i = 0; i < swapchainCount; i++) {
2915 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[i].oldSwapchain,
2916 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2917 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2918 skip_call |= ValidateNonDispatchableObject(device_data->physical_device, pCreateInfos[i].surface,
2919 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2920 }
2921 }
2922 }
2923 if (skip_call) {
2924 return VK_ERROR_VALIDATION_FAILED_EXT;
2925 }
2926 VkResult result =
2927 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
2928 {
2929 std::lock_guard<std::mutex> lock(global_lock);
2930 if (result == VK_SUCCESS) {
2931 for (i = 0; i < swapchainCount; i++) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002932 CreateObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Youngead9b932016-09-08 12:28:38 -06002933 }
2934 }
2935 }
2936 return result;
2937}
2938
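// The debug report callback is both registered with the layer's own logging machinery
// (layer_create_msg_callback) and tracked like any other object.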
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002939VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2940 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2941 const VkAllocationCallbacks *pAllocator,
2942 VkDebugReportCallbackEXT *pCallback) {
2943 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2944 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
2945 if (VK_SUCCESS == result) {
2946 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2947 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
Chris Forbesfeecd402016-09-29 14:53:50 +13002948 CreateObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002949 }
2950 return result;
2951}
2952
2953VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
2954 const VkAllocationCallbacks *pAllocator) {
2955 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2956 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
2957 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2958 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
Chris Forbesec461992016-09-29 14:41:44 +13002959 DestroyObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002960}
2961
2962VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
2963 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
2964 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
2965 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2966 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
2967}
2968
2969static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
2970
2971static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
2972 VK_LAYER_API_VERSION, // specVersion
2973 1, // implementationVersion
2974 "LunarG Validation Layer"};
2975
2976VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
2977 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2978}
2979
2980VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
2981 VkLayerProperties *pProperties) {
2982 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2983}
2984
2985VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
2986 VkExtensionProperties *pProperties) {
2987 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
2988 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
2989
2990 return VK_ERROR_LAYER_NOT_PRESENT;
2991}
2992
2993VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
2994 uint32_t *pCount, VkExtensionProperties *pProperties) {
2995 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
2996 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
2997
2998 assert(physicalDevice);
2999 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3000 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3001}
3002
3003static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3004 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3005 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3006}
3007
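// Resolve WSI entry points to this layer's intercepts only when the matching instance extension
// was enabled at vkCreateInstance time (recorded in instanceExtMap); otherwise return nullptr.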
3008static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
3009 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
3010 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
3011 return nullptr;
3012
3013 if (!strcmp("vkDestroySurfaceKHR", name))
3014 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
3015 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
3016 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
3017 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
3018 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
3019 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
3020 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
3021 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
3022 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);
3023
3024#ifdef VK_USE_PLATFORM_WIN32_KHR
3025 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
3026 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
3027 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
3028 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
3029#endif // VK_USE_PLATFORM_WIN32_KHR
3030#ifdef VK_USE_PLATFORM_XCB_KHR
Mark Lobodzinski38080682016-07-22 15:30:27 -06003031 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003032 return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
Mark Lobodzinski38080682016-07-22 15:30:27 -06003033 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003034 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
3035#endif // VK_USE_PLATFORM_XCB_KHR
3036#ifdef VK_USE_PLATFORM_XLIB_KHR
Mark Lobodzinski38080682016-07-22 15:30:27 -06003037 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003038 return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
Mark Lobodzinski38080682016-07-22 15:30:27 -06003039 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003040 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
3041#endif // VK_USE_PLATFORM_XLIB_KHR
3042#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski38080682016-07-22 15:30:27 -06003043 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003044 return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
Mark Lobodzinski38080682016-07-22 15:30:27 -06003045 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003046 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
3047#endif // VK_USE_PLATFORM_MIR_KHR
3048#ifdef VK_USE_PLATFORM_WAYLAND_KHR
Mark Lobodzinski38080682016-07-22 15:30:27 -06003049 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003050 return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
Mark Lobodzinski38080682016-07-22 15:30:27 -06003051 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003052 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
3053#endif // VK_USE_PLATFORM_WAYLAND_KHR
3054#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski38080682016-07-22 15:30:27 -06003055 if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003056 return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
3057#endif // VK_USE_PLATFORM_ANDROID_KHR
3058
3059 return nullptr;
3060}
3061
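// Record which device-level extensions were enabled on this VkDevice (swapchain, display swapchain,
// and the layer's own OBJTRACK_EXTENSIONS flag) for use by later checks.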
3062static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3063 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3064 device_data->wsi_enabled = false;
Mark Youngead9b932016-09-08 12:28:38 -06003065 device_data->wsi_display_swapchain_enabled = false;
3066 device_data->objtrack_extensions_enabled = false;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003067
3068 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3069 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3070 device_data->wsi_enabled = true;
3071 }
Mark Youngead9b932016-09-08 12:28:38 -06003072 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) {
3073 device_data->wsi_display_swapchain_enabled = true;
3074 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003075 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3076 device_data->objtrack_extensions_enabled = true;
3077 }
3078 }
3079}
3080
3081static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3082 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3083
3084
3085 instanceExtMap[pDisp] = {};
3086
3087 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3088 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3089 instanceExtMap[pDisp].wsi_enabled = true;
3090 }
3091#ifdef VK_USE_PLATFORM_XLIB_KHR
3092 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3093 instanceExtMap[pDisp].xlib_enabled = true;
3094 }
3095#endif
3096#ifdef VK_USE_PLATFORM_XCB_KHR
3097 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3098 instanceExtMap[pDisp].xcb_enabled = true;
3099 }
3100#endif
3101#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3102 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3103 instanceExtMap[pDisp].wayland_enabled = true;
3104 }
3105#endif
3106#ifdef VK_USE_PLATFORM_MIR_KHR
3107 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3108 instanceExtMap[pDisp].mir_enabled = true;
3109 }
3110#endif
3111#ifdef VK_USE_PLATFORM_ANDROID_KHR
3112 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3113 instanceExtMap[pDisp].android_enabled = true;
3114 }
3115#endif
3116#ifdef VK_USE_PLATFORM_WIN32_KHR
3117 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3118 instanceExtMap[pDisp].win32_enabled = true;
3119 }
3120#endif
3121 }
3122}
3123
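// Standard layer bootstrapping: fetch the next vkCreateDevice from the layer chain info, advance the
// chain, create the device, then build this layer's device dispatch table and start tracking *pDevice.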
3124VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
3125 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
3126 std::lock_guard<std::mutex> lock(global_lock);
3127 layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3128 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3129
3130 assert(chain_info->u.pLayerInfo);
3131 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3132 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
3133 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
3134 if (fpCreateDevice == NULL) {
3135 return VK_ERROR_INITIALIZATION_FAILED;
3136 }
3137
3138 // Advance the link info for the next element on the chain
3139 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3140
3141 VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
3142 if (result != VK_SUCCESS) {
3143 return result;
3144 }
3145
3146 layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
3147 device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);
3148
3149 // Add link back to physDev
3150 device_data->physical_device = physicalDevice;
3151
3152 initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);
3153
3154 CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
Chris Forbesfeecd402016-09-29 14:53:50 +13003155 CreateObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003156
3157 return result;
3158}
3159
3160VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3161 uint32_t *pQueueFamilyPropertyCount,
3162 VkQueueFamilyProperties *pQueueFamilyProperties) {
3163 get_dispatch_table(ot_instance_table_map, physicalDevice)
3164 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3165 std::lock_guard<std::mutex> lock(global_lock);
3166 if (pQueueFamilyProperties != NULL) {
3167 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3168 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3169 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3170 }
3171 }
3172}
3173
3174VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
3175 VkInstance *pInstance) {
3176 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3177
3178 assert(chain_info->u.pLayerInfo);
3179 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3180 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
3181 if (fpCreateInstance == NULL) {
3182 return VK_ERROR_INITIALIZATION_FAILED;
3183 }
3184
3185 // Advance the link info for the next element on the chain
3186 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3187
3188 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
3189 if (result != VK_SUCCESS) {
3190 return result;
3191 }
3192
3193 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
3194 instance_data->instance = *pInstance;
3195 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
3196 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);
3197
3198 // Look for one or more debug report create info structures, and copy the
3199 // callback(s) for each one found (for use by vkDestroyInstance)
3200 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
3201 &instance_data->tmp_callbacks);
3202
3203 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
3204 pCreateInfo->ppEnabledExtensionNames);
3205
3206 InitObjectTracker(instance_data, pAllocator);
3207 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);
3208
Chris Forbesfeecd402016-09-29 14:53:50 +13003209 CreateObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003210
3211 return result;
3212}
3213
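// Each physical device handle returned by the driver is registered with the tracker so that later
// calls taking a VkPhysicalDevice can be validated.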
3214VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3215 VkPhysicalDevice *pPhysicalDevices) {
3216    bool skip_call = false;
3217 std::unique_lock<std::mutex> lock(global_lock);
3218 skip_call |= ValidateDispatchableObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3219 lock.unlock();
3220 if (skip_call) {
3221 return VK_ERROR_VALIDATION_FAILED_EXT;
3222 }
3223 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3224 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3225 lock.lock();
3226 if (result == VK_SUCCESS) {
3227 if (pPhysicalDevices) {
3228 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
Chris Forbesfeecd402016-09-29 14:53:50 +13003229                CreateObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, nullptr);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003230 }
3231 }
3232 }
3233 lock.unlock();
3234 return result;
3235}
3236
3237VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3238 std::unique_lock<std::mutex> lock(global_lock);
3239 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3240 lock.unlock();
3241
3242 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3243
3244 lock.lock();
3245
3246 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3247 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3248}
3249
3250VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3251 std::unique_lock<std::mutex> lock(global_lock);
3252 ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3253 lock.unlock();
3254
3255 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3256
3257 lock.lock();
Chris Forbesec461992016-09-29 14:41:44 +13003258 DestroyObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003259}
3260
3261VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3262 VkMemoryMapFlags flags, void **ppData) {
3263    bool skip_call = false;
3264 std::unique_lock<std::mutex> lock(global_lock);
3265 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3266 lock.unlock();
3267    if (skip_call) {
3268 return VK_ERROR_VALIDATION_FAILED_EXT;
3269 }
3270 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3271 return result;
3272}
3273
3274VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3275    bool skip_call = false;
3276 std::unique_lock<std::mutex> lock(global_lock);
3277 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3278 lock.unlock();
3279    if (skip_call) {
3280 return;
3281 }
3282
3283 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3284}
3285VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3286 VkFence fence) {
3287 std::unique_lock<std::mutex> lock(global_lock);
3288 ValidateQueueFlags(queue, "QueueBindSparse");
3289
3290 for (uint32_t i = 0; i < bindInfoCount; i++) {
3291 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
3292 ValidateNonDispatchableObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
3293 false);
3294 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
3295 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
3296 false);
3297 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
3298 ValidateNonDispatchableObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
3299 }
3300 lock.unlock();
3301
3302 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3303 return result;
3304}
3305
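// Command buffers are tracked together with their parent pool and level, which lets
// vkFreeCommandBuffers verify that each buffer really came from the pool it is being freed from.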
3306VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3307 VkCommandBuffer *pCommandBuffers) {
3308    bool skip_call = false;
3309 std::unique_lock<std::mutex> lock(global_lock);
3310 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3311 skip_call |=
3312 ValidateNonDispatchableObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3313 lock.unlock();
3314
3315 if (skip_call) {
3316 return VK_ERROR_VALIDATION_FAILED_EXT;
3317 }
3318
3319 VkResult result =
3320 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3321
3322 lock.lock();
3323    for (uint32_t i = 0; (result == VK_SUCCESS) && (i < pAllocateInfo->commandBufferCount); i++) {
3324 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3325 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3326 }
3327 lock.unlock();
3328
3329 return result;
3330}
3331
3332VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3333 VkDescriptorSet *pDescriptorSets) {
3334    bool skip_call = false;
3335 std::unique_lock<std::mutex> lock(global_lock);
3336 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3337 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->descriptorPool,
3338 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3339 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3340 skip_call |= ValidateNonDispatchableObject(device, pAllocateInfo->pSetLayouts[i],
3341 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3342 }
3343 lock.unlock();
3344 if (skip_call) {
3345 return VK_ERROR_VALIDATION_FAILED_EXT;
3346 }
3347
3348 VkResult result =
3349 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3350
3351 if (VK_SUCCESS == result) {
3352 lock.lock();
3353 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3354 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3355 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3356 }
3357 lock.unlock();
3358 }
3359
3360 return result;
3361}
3362
3363VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3364 const VkCommandBuffer *pCommandBuffers) {
3365 bool skip_call = false;
3366 std::unique_lock<std::mutex> lock(global_lock);
3367    skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3368    skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3369 for (uint32_t i = 0; i < commandBufferCount; i++) {
3370 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3371 }
3372
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003373 for (uint32_t i = 0; i < commandBufferCount; i++) {
Chris Forbesec461992016-09-29 14:41:44 +13003374        DestroyObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003375 }
3376
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003377 lock.unlock();
3378 if (!skip_call) {
3379 get_dispatch_table(ot_device_table_map, device)
3380 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3381 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003382}
3383VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3384 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3385 std::unique_lock<std::mutex> lock(global_lock);
3386 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3387 // Remove this swapchain's images from our map of such images.
3388 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3389 while (itr != device_data->swapchainImageMap.end()) {
3390 OBJTRACK_NODE *pNode = (*itr).second;
3391 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3392 delete pNode;
3393 auto delete_item = itr++;
3394 device_data->swapchainImageMap.erase(delete_item);
3395 } else {
3396 ++itr;
3397 }
3398 }
Chris Forbesec461992016-09-29 14:41:44 +13003399 DestroyObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003400 lock.unlock();
3401
3402 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3403}
3404
3405VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3406 const VkDescriptorSet *pDescriptorSets) {
3407 bool skip_call = false;
3408 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3409 std::unique_lock<std::mutex> lock(global_lock);
3410 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3411 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3412 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3413 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3414 }
3415
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003416 for (uint32_t i = 0; i < descriptorSetCount; i++) {
Chris Forbesec461992016-09-29 14:41:44 +13003417        DestroyObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003418 }
3419
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003420 lock.unlock();
3421 if (!skip_call) {
3422 result = get_dispatch_table(ot_device_table_map, device)
3423 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3424 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003425 return result;
3426}
3427
3428VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3429 const VkAllocationCallbacks *pAllocator) {
3430    bool skip_call = false;
3431 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3432 std::unique_lock<std::mutex> lock(global_lock);
3433 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3434 skip_call |= ValidateNonDispatchableObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3435 lock.unlock();
3436 if (skip_call) {
3437 return;
3438 }
3439 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
3440 // Remove this pool's descriptor sets from our descriptorSet map.
3441 lock.lock();
3442 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
3443 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
3444 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
3445 OBJTRACK_NODE *pNode = (*itr).second;
3446 auto del_itr = itr++;
3447 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
Chris Forbesec461992016-09-29 14:41:44 +13003448 DestroyObject(device, (VkDescriptorSet)((*del_itr).first),
Chris Forbes3e51a202016-09-29 14:35:09 +13003449                          VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003450 }
3451 }
Chris Forbesec461992016-09-29 14:41:44 +13003452 DestroyObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003453 lock.unlock();
3454 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
3455}
3456
3457VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
3458 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3459 bool skip_call = false;
3460 std::unique_lock<std::mutex> lock(global_lock);
3461 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3462 skip_call |= ValidateNonDispatchableObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3463 lock.unlock();
3464 if (skip_call) {
3465 return;
3466 }
3467 lock.lock();
3468 // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
3469 // Remove this pool's cmdBuffers from our cmd buffer map.
3470 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
3471 auto del_itr = itr;
3472 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
3473 OBJTRACK_NODE *pNode = (*itr).second;
3474 del_itr = itr++;
3475 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
3476 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
Chris Forbesec461992016-09-29 14:41:44 +13003477 DestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
Chris Forbes3e51a202016-09-29 14:35:09 +13003478                          VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003479 }
3480 }
Chris Forbesec461992016-09-29 14:41:44 +13003481 DestroyObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003482 lock.unlock();
3483 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
3484}
3485
3486VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
3487 VkImage *pSwapchainImages) {
3488    bool skip_call = false;
3489 std::unique_lock<std::mutex> lock(global_lock);
3490 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3491 lock.unlock();
3492 if (skip_call) {
3493 return VK_ERROR_VALIDATION_FAILED_EXT;
3494 }
3495 VkResult result = get_dispatch_table(ot_device_table_map, device)
3496 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
3497 if (pSwapchainImages != NULL) {
3498 lock.lock();
3499 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
3500 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
3501 }
3502 lock.unlock();
3503 }
3504 return result;
3505}
3506
3507VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3508 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3509 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3510 bool skip_call = false;
3511 std::unique_lock<std::mutex> lock(global_lock);
3512 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3513 if (pCreateInfos) {
3514 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3515 if (pCreateInfos[idx0].basePipelineHandle) {
3516 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3517 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3518 }
3519 if (pCreateInfos[idx0].layout) {
3520 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3521 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3522 }
3523 if (pCreateInfos[idx0].pStages) {
3524 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
3525 if (pCreateInfos[idx0].pStages[idx1].module) {
3526 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].pStages[idx1].module,
3527 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3528 }
3529 }
3530 }
3531 if (pCreateInfos[idx0].renderPass) {
3532 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].renderPass,
3533 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
3534 }
3535 }
3536 }
3537 if (pipelineCache) {
3538 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3539 }
3540 lock.unlock();
3541 if (skip_call) {
3542 return VK_ERROR_VALIDATION_FAILED_EXT;
3543 }
3544 VkResult result = get_dispatch_table(ot_device_table_map, device)
3545 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3546 lock.lock();
3547 if (result == VK_SUCCESS) {
3548 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
Chris Forbesfeecd402016-09-29 14:53:50 +13003549 CreateObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003550 }
3551 }
3552 lock.unlock();
3553 return result;
3554}
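// CreateGraphicsPipelines validates every handle reachable from the create infos (base pipeline,
// pipeline layout, shader modules, render pass) before dispatching the call, then registers each
// pipeline returned on success. The optional basePipelineHandle is only checked when it is
// non-null. A minimal sketch of a call this path validates (app-side names are illustrative, and
// required state such as the shader stages is omitted for brevity):
//
//     VkGraphicsPipelineCreateInfo ci = {};
//     ci.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
//     ci.basePipelineHandle = VK_NULL_HANDLE;  // null: skipped by the layer's handle check
//     ci.layout = layout;                      // must be a live VkPipelineLayout
//     ci.renderPass = render_pass;             // must be a live VkRenderPass
//     VkPipeline pipeline;
//     vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &ci, nullptr, &pipeline);
//
// CreateComputePipelines below applies the same pattern to VkComputePipelineCreateInfo.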
3555
3556VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3557 const VkComputePipelineCreateInfo *pCreateInfos,
3558 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3559 bool skip_call = false;
3560 std::unique_lock<std::mutex> lock(global_lock);
3561 skip_call |= ValidateDispatchableObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3562 if (pCreateInfos) {
3563 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3564 if (pCreateInfos[idx0].basePipelineHandle) {
3565 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].basePipelineHandle,
3566 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3567 }
3568 if (pCreateInfos[idx0].layout) {
3569 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].layout,
3570 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3571 }
3572 if (pCreateInfos[idx0].stage.module) {
3573 skip_call |= ValidateNonDispatchableObject(device, pCreateInfos[idx0].stage.module,
3574 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3575 }
3576 }
3577 }
3578 if (pipelineCache) {
3579 skip_call |= ValidateNonDispatchableObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
3580 }
3581 lock.unlock();
3582 if (skip_call) {
3583 return VK_ERROR_VALIDATION_FAILED_EXT;
3584 }
3585 VkResult result = get_dispatch_table(ot_device_table_map, device)
3586 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3587 lock.lock();
3588 if (result == VK_SUCCESS) {
3589 for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
Chris Forbesfeecd402016-09-29 14:53:50 +13003590 CreateObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003591 }
3592 }
3593 lock.unlock();
3594 return result;
3595}
3596
3597static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
3598 if (!name || name[0] != 'v' || name[1] != 'k')
3599 return NULL;
3600
3601 name += 2;
3602 if (!strcmp(name, "GetDeviceProcAddr"))
3603 return (PFN_vkVoidFunction)GetDeviceProcAddr;
3604 if (!strcmp(name, "DestroyDevice"))
3605 return (PFN_vkVoidFunction)DestroyDevice;
3606 if (!strcmp(name, "GetDeviceQueue"))
3607 return (PFN_vkVoidFunction)GetDeviceQueue;
3608 if (!strcmp(name, "QueueSubmit"))
3609 return (PFN_vkVoidFunction)QueueSubmit;
3610 if (!strcmp(name, "QueueWaitIdle"))
3611 return (PFN_vkVoidFunction)QueueWaitIdle;
3612 if (!strcmp(name, "DeviceWaitIdle"))
3613 return (PFN_vkVoidFunction)DeviceWaitIdle;
3614 if (!strcmp(name, "AllocateMemory"))
3615 return (PFN_vkVoidFunction)AllocateMemory;
3616 if (!strcmp(name, "FreeMemory"))
3617 return (PFN_vkVoidFunction)FreeMemory;
3618 if (!strcmp(name, "MapMemory"))
3619 return (PFN_vkVoidFunction)MapMemory;
3620 if (!strcmp(name, "UnmapMemory"))
3621 return (PFN_vkVoidFunction)UnmapMemory;
3622 if (!strcmp(name, "FlushMappedMemoryRanges"))
3623 return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
3624 if (!strcmp(name, "InvalidateMappedMemoryRanges"))
3625 return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
3626 if (!strcmp(name, "GetDeviceMemoryCommitment"))
3627 return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
3628 if (!strcmp(name, "BindBufferMemory"))
3629 return (PFN_vkVoidFunction)BindBufferMemory;
3630 if (!strcmp(name, "BindImageMemory"))
3631 return (PFN_vkVoidFunction)BindImageMemory;
3632 if (!strcmp(name, "GetBufferMemoryRequirements"))
3633 return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
3634 if (!strcmp(name, "GetImageMemoryRequirements"))
3635 return (PFN_vkVoidFunction)GetImageMemoryRequirements;
3636 if (!strcmp(name, "GetImageSparseMemoryRequirements"))
3637 return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
3638 if (!strcmp(name, "QueueBindSparse"))
3639 return (PFN_vkVoidFunction)QueueBindSparse;
3640 if (!strcmp(name, "CreateFence"))
3641 return (PFN_vkVoidFunction)CreateFence;
3642 if (!strcmp(name, "DestroyFence"))
3643 return (PFN_vkVoidFunction)DestroyFence;
3644 if (!strcmp(name, "ResetFences"))
3645 return (PFN_vkVoidFunction)ResetFences;
3646 if (!strcmp(name, "GetFenceStatus"))
3647 return (PFN_vkVoidFunction)GetFenceStatus;
3648 if (!strcmp(name, "WaitForFences"))
3649 return (PFN_vkVoidFunction)WaitForFences;
3650 if (!strcmp(name, "CreateSemaphore"))
3651 return (PFN_vkVoidFunction)CreateSemaphore;
3652 if (!strcmp(name, "DestroySemaphore"))
3653 return (PFN_vkVoidFunction)DestroySemaphore;
3654 if (!strcmp(name, "CreateEvent"))
3655 return (PFN_vkVoidFunction)CreateEvent;
3656 if (!strcmp(name, "DestroyEvent"))
3657 return (PFN_vkVoidFunction)DestroyEvent;
3658 if (!strcmp(name, "GetEventStatus"))
3659 return (PFN_vkVoidFunction)GetEventStatus;
3660 if (!strcmp(name, "SetEvent"))
3661 return (PFN_vkVoidFunction)SetEvent;
3662 if (!strcmp(name, "ResetEvent"))
3663 return (PFN_vkVoidFunction)ResetEvent;
3664 if (!strcmp(name, "CreateQueryPool"))
3665 return (PFN_vkVoidFunction)CreateQueryPool;
3666 if (!strcmp(name, "DestroyQueryPool"))
3667 return (PFN_vkVoidFunction)DestroyQueryPool;
3668 if (!strcmp(name, "GetQueryPoolResults"))
3669 return (PFN_vkVoidFunction)GetQueryPoolResults;
3670 if (!strcmp(name, "CreateBuffer"))
3671 return (PFN_vkVoidFunction)CreateBuffer;
3672 if (!strcmp(name, "DestroyBuffer"))
3673 return (PFN_vkVoidFunction)DestroyBuffer;
3674 if (!strcmp(name, "CreateBufferView"))
3675 return (PFN_vkVoidFunction)CreateBufferView;
3676 if (!strcmp(name, "DestroyBufferView"))
3677 return (PFN_vkVoidFunction)DestroyBufferView;
3678 if (!strcmp(name, "CreateImage"))
3679 return (PFN_vkVoidFunction)CreateImage;
3680 if (!strcmp(name, "DestroyImage"))
3681 return (PFN_vkVoidFunction)DestroyImage;
3682 if (!strcmp(name, "GetImageSubresourceLayout"))
3683 return (PFN_vkVoidFunction)GetImageSubresourceLayout;
3684 if (!strcmp(name, "CreateImageView"))
3685 return (PFN_vkVoidFunction)CreateImageView;
3686 if (!strcmp(name, "DestroyImageView"))
3687 return (PFN_vkVoidFunction)DestroyImageView;
3688 if (!strcmp(name, "CreateShaderModule"))
3689 return (PFN_vkVoidFunction)CreateShaderModule;
3690 if (!strcmp(name, "DestroyShaderModule"))
3691 return (PFN_vkVoidFunction)DestroyShaderModule;
3692 if (!strcmp(name, "CreatePipelineCache"))
3693 return (PFN_vkVoidFunction)CreatePipelineCache;
3694 if (!strcmp(name, "DestroyPipelineCache"))
3695 return (PFN_vkVoidFunction)DestroyPipelineCache;
3696 if (!strcmp(name, "GetPipelineCacheData"))
3697 return (PFN_vkVoidFunction)GetPipelineCacheData;
3698 if (!strcmp(name, "MergePipelineCaches"))
3699 return (PFN_vkVoidFunction)MergePipelineCaches;
3700 if (!strcmp(name, "CreateGraphicsPipelines"))
3701 return (PFN_vkVoidFunction)CreateGraphicsPipelines;
3702 if (!strcmp(name, "CreateComputePipelines"))
3703 return (PFN_vkVoidFunction)CreateComputePipelines;
3704 if (!strcmp(name, "DestroyPipeline"))
3705 return (PFN_vkVoidFunction)DestroyPipeline;
3706 if (!strcmp(name, "CreatePipelineLayout"))
3707 return (PFN_vkVoidFunction)CreatePipelineLayout;
3708 if (!strcmp(name, "DestroyPipelineLayout"))
3709 return (PFN_vkVoidFunction)DestroyPipelineLayout;
3710 if (!strcmp(name, "CreateSampler"))
3711 return (PFN_vkVoidFunction)CreateSampler;
3712 if (!strcmp(name, "DestroySampler"))
3713 return (PFN_vkVoidFunction)DestroySampler;
3714 if (!strcmp(name, "CreateDescriptorSetLayout"))
3715 return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
3716 if (!strcmp(name, "DestroyDescriptorSetLayout"))
3717 return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
3718 if (!strcmp(name, "CreateDescriptorPool"))
3719 return (PFN_vkVoidFunction)CreateDescriptorPool;
3720 if (!strcmp(name, "DestroyDescriptorPool"))
3721 return (PFN_vkVoidFunction)DestroyDescriptorPool;
3722 if (!strcmp(name, "ResetDescriptorPool"))
3723 return (PFN_vkVoidFunction)ResetDescriptorPool;
3724 if (!strcmp(name, "AllocateDescriptorSets"))
3725 return (PFN_vkVoidFunction)AllocateDescriptorSets;
3726 if (!strcmp(name, "FreeDescriptorSets"))
3727 return (PFN_vkVoidFunction)FreeDescriptorSets;
3728 if (!strcmp(name, "UpdateDescriptorSets"))
3729 return (PFN_vkVoidFunction)UpdateDescriptorSets;
3730 if (!strcmp(name, "CreateFramebuffer"))
3731 return (PFN_vkVoidFunction)CreateFramebuffer;
3732 if (!strcmp(name, "DestroyFramebuffer"))
3733 return (PFN_vkVoidFunction)DestroyFramebuffer;
3734 if (!strcmp(name, "CreateRenderPass"))
3735 return (PFN_vkVoidFunction)CreateRenderPass;
3736 if (!strcmp(name, "DestroyRenderPass"))
3737 return (PFN_vkVoidFunction)DestroyRenderPass;
3738 if (!strcmp(name, "GetRenderAreaGranularity"))
3739 return (PFN_vkVoidFunction)GetRenderAreaGranularity;
3740 if (!strcmp(name, "CreateCommandPool"))
3741 return (PFN_vkVoidFunction)CreateCommandPool;
3742 if (!strcmp(name, "DestroyCommandPool"))
3743 return (PFN_vkVoidFunction)DestroyCommandPool;
3744 if (!strcmp(name, "ResetCommandPool"))
3745 return (PFN_vkVoidFunction)ResetCommandPool;
3746 if (!strcmp(name, "AllocateCommandBuffers"))
3747 return (PFN_vkVoidFunction)AllocateCommandBuffers;
3748 if (!strcmp(name, "FreeCommandBuffers"))
3749 return (PFN_vkVoidFunction)FreeCommandBuffers;
3750 if (!strcmp(name, "BeginCommandBuffer"))
3751 return (PFN_vkVoidFunction)BeginCommandBuffer;
3752 if (!strcmp(name, "EndCommandBuffer"))
3753 return (PFN_vkVoidFunction)EndCommandBuffer;
3754 if (!strcmp(name, "ResetCommandBuffer"))
3755 return (PFN_vkVoidFunction)ResetCommandBuffer;
3756 if (!strcmp(name, "CmdBindPipeline"))
3757 return (PFN_vkVoidFunction)CmdBindPipeline;
3758 if (!strcmp(name, "CmdSetViewport"))
3759 return (PFN_vkVoidFunction)CmdSetViewport;
3760 if (!strcmp(name, "CmdSetScissor"))
3761 return (PFN_vkVoidFunction)CmdSetScissor;
3762 if (!strcmp(name, "CmdSetLineWidth"))
3763 return (PFN_vkVoidFunction)CmdSetLineWidth;
3764 if (!strcmp(name, "CmdSetDepthBias"))
3765 return (PFN_vkVoidFunction)CmdSetDepthBias;
3766 if (!strcmp(name, "CmdSetBlendConstants"))
3767 return (PFN_vkVoidFunction)CmdSetBlendConstants;
3768 if (!strcmp(name, "CmdSetDepthBounds"))
3769 return (PFN_vkVoidFunction)CmdSetDepthBounds;
3770 if (!strcmp(name, "CmdSetStencilCompareMask"))
3771 return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
3772 if (!strcmp(name, "CmdSetStencilWriteMask"))
3773 return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
3774 if (!strcmp(name, "CmdSetStencilReference"))
3775 return (PFN_vkVoidFunction)CmdSetStencilReference;
3776 if (!strcmp(name, "CmdBindDescriptorSets"))
3777 return (PFN_vkVoidFunction)CmdBindDescriptorSets;
3778 if (!strcmp(name, "CmdBindIndexBuffer"))
3779 return (PFN_vkVoidFunction)CmdBindIndexBuffer;
3780 if (!strcmp(name, "CmdBindVertexBuffers"))
3781 return (PFN_vkVoidFunction)CmdBindVertexBuffers;
3782 if (!strcmp(name, "CmdDraw"))
3783 return (PFN_vkVoidFunction)CmdDraw;
3784 if (!strcmp(name, "CmdDrawIndexed"))
3785 return (PFN_vkVoidFunction)CmdDrawIndexed;
3786 if (!strcmp(name, "CmdDrawIndirect"))
3787 return (PFN_vkVoidFunction)CmdDrawIndirect;
3788 if (!strcmp(name, "CmdDrawIndexedIndirect"))
3789 return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
3790 if (!strcmp(name, "CmdDispatch"))
3791 return (PFN_vkVoidFunction)CmdDispatch;
3792 if (!strcmp(name, "CmdDispatchIndirect"))
3793 return (PFN_vkVoidFunction)CmdDispatchIndirect;
3794 if (!strcmp(name, "CmdCopyBuffer"))
3795 return (PFN_vkVoidFunction)CmdCopyBuffer;
3796 if (!strcmp(name, "CmdCopyImage"))
3797 return (PFN_vkVoidFunction)CmdCopyImage;
3798 if (!strcmp(name, "CmdBlitImage"))
3799 return (PFN_vkVoidFunction)CmdBlitImage;
3800 if (!strcmp(name, "CmdCopyBufferToImage"))
3801 return (PFN_vkVoidFunction)CmdCopyBufferToImage;
3802 if (!strcmp(name, "CmdCopyImageToBuffer"))
3803 return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
3804 if (!strcmp(name, "CmdUpdateBuffer"))
3805 return (PFN_vkVoidFunction)CmdUpdateBuffer;
3806 if (!strcmp(name, "CmdFillBuffer"))
3807 return (PFN_vkVoidFunction)CmdFillBuffer;
3808 if (!strcmp(name, "CmdClearColorImage"))
3809 return (PFN_vkVoidFunction)CmdClearColorImage;
3810 if (!strcmp(name, "CmdClearDepthStencilImage"))
3811 return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
3812 if (!strcmp(name, "CmdClearAttachments"))
3813 return (PFN_vkVoidFunction)CmdClearAttachments;
3814 if (!strcmp(name, "CmdResolveImage"))
3815 return (PFN_vkVoidFunction)CmdResolveImage;
3816 if (!strcmp(name, "CmdSetEvent"))
3817 return (PFN_vkVoidFunction)CmdSetEvent;
3818 if (!strcmp(name, "CmdResetEvent"))
3819 return (PFN_vkVoidFunction)CmdResetEvent;
3820 if (!strcmp(name, "CmdWaitEvents"))
3821 return (PFN_vkVoidFunction)CmdWaitEvents;
3822 if (!strcmp(name, "CmdPipelineBarrier"))
3823 return (PFN_vkVoidFunction)CmdPipelineBarrier;
3824 if (!strcmp(name, "CmdBeginQuery"))
3825 return (PFN_vkVoidFunction)CmdBeginQuery;
3826 if (!strcmp(name, "CmdEndQuery"))
3827 return (PFN_vkVoidFunction)CmdEndQuery;
3828 if (!strcmp(name, "CmdResetQueryPool"))
3829 return (PFN_vkVoidFunction)CmdResetQueryPool;
3830 if (!strcmp(name, "CmdWriteTimestamp"))
3831 return (PFN_vkVoidFunction)CmdWriteTimestamp;
3832 if (!strcmp(name, "CmdCopyQueryPoolResults"))
3833 return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
3834 if (!strcmp(name, "CmdPushConstants"))
3835 return (PFN_vkVoidFunction)CmdPushConstants;
3836 if (!strcmp(name, "CmdBeginRenderPass"))
3837 return (PFN_vkVoidFunction)CmdBeginRenderPass;
3838 if (!strcmp(name, "CmdNextSubpass"))
3839 return (PFN_vkVoidFunction)CmdNextSubpass;
3840 if (!strcmp(name, "CmdEndRenderPass"))
3841 return (PFN_vkVoidFunction)CmdEndRenderPass;
3842 if (!strcmp(name, "CmdExecuteCommands"))
3843 return (PFN_vkVoidFunction)CmdExecuteCommands;
3844
3845 return NULL;
3846}
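// InterceptCoreDeviceCommand strips the leading "vk" and walks a strcmp() chain to map a core
// device-level command name onto this layer's implementation; unrecognized names fall through to
// NULL so GetDeviceProcAddr can forward them down the layer chain. An equivalent table-driven
// lookup keyed on the full command name (a sketch only, not the approach used in this file)
// could look like:
//
//     static const std::unordered_map<std::string, PFN_vkVoidFunction> core_device_commands = {
//         {"vkDestroyDevice", (PFN_vkVoidFunction)DestroyDevice},
//         {"vkQueueSubmit", (PFN_vkVoidFunction)QueueSubmit},
//         // ... one entry per intercepted command ...
//     };
//     auto match = core_device_commands.find(funcName);
//     PFN_vkVoidFunction fp = (match != core_device_commands.end()) ? match->second : NULL;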
3847static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
3848 if (!name || name[0] != 'v' || name[1] != 'k')
3849 return NULL;
3850
3851 name += 2;
3852 if (!strcmp(name, "CreateInstance"))
3853 return (PFN_vkVoidFunction)CreateInstance;
3854 if (!strcmp(name, "DestroyInstance"))
3855 return (PFN_vkVoidFunction)DestroyInstance;
3856 if (!strcmp(name, "EnumeratePhysicalDevices"))
3857 return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
3858 if (!strcmp(name, "GetPhysicalDeviceFeatures"))
3859 return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
3860 if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
3861 return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
3862 if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
3863 return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
3864 if (!strcmp(name, "GetPhysicalDeviceProperties"))
3865 return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
3866 if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
3867 return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
3868 if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
3869 return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
3870 if (!strcmp(name, "GetInstanceProcAddr"))
3871 return (PFN_vkVoidFunction)GetInstanceProcAddr;
3872 if (!strcmp(name, "CreateDevice"))
3873 return (PFN_vkVoidFunction)CreateDevice;
3874 if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
3875 return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
3876 if (!strcmp(name, "EnumerateInstanceLayerProperties"))
3877 return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
3878 if (!strcmp(name, "EnumerateDeviceLayerProperties"))
3879 return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
3880 if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
3881 return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
3882
3883 return NULL;
3884}
3885
3886static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
3887 if (device) {
3888 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
Mark Youngead9b932016-09-08 12:28:38 -06003889
3890 if (device_data->wsi_enabled) {
3891 if (!strcmp("vkCreateSwapchainKHR", name))
3892 return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
3893 if (!strcmp("vkDestroySwapchainKHR", name))
3894 return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
3895 if (!strcmp("vkGetSwapchainImagesKHR", name))
3896 return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
3897 if (!strcmp("vkAcquireNextImageKHR", name))
3898 return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
3899 if (!strcmp("vkQueuePresentKHR", name))
3900 return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
3901 }
3902
3903 if (device_data->wsi_display_swapchain_enabled) {
3904 if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
3905 return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
3906 }
3907 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003908 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003909
3910 return nullptr;
3911}
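// The WSI intercepts are gated on per-device flags (wsi_enabled, wsi_display_swapchain_enabled),
// which this layer presumably sets when the device is created with the corresponding extensions
// enabled, so an application that never enabled VK_KHR_swapchain never receives the layer's
// swapchain hooks. A minimal sketch of the enabling call (app-side names are illustrative, and
// the required queue-create info is omitted):
//
//     const char *device_extensions[] = {VK_KHR_SWAPCHAIN_EXTENSION_NAME};
//     VkDeviceCreateInfo device_ci = {};
//     device_ci.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
//     device_ci.enabledExtensionCount = 1;
//     device_ci.ppEnabledExtensionNames = device_extensions;
//     vkCreateDevice(physical_device, &device_ci, nullptr, &device);  // assumed to flip wsi_enabled in layer_data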
3912
3913VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
3914 PFN_vkVoidFunction addr;
3915 addr = InterceptCoreDeviceCommand(funcName);
3916 if (addr) {
3917 return addr;
3918 }
3919 assert(device);
3920
3921 addr = InterceptWsiEnabledCommand(funcName, device);
3922 if (addr) {
3923 return addr;
3924 }
3925 if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
3926 return NULL;
3927 }
3928 return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
3929}
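// GetDeviceProcAddr resolution order: core device commands intercepted by this layer, then WSI
// commands if the device enabled those extensions, and finally the next layer's GetDeviceProcAddr
// from the dispatch table, so anything this layer does not wrap still reaches the driver.
// Illustrative caller-side sketch (the second command name below is hypothetical):
//
//     PFN_vkQueueSubmit fpQueueSubmit =
//         (PFN_vkQueueSubmit)vkGetDeviceProcAddr(device, "vkQueueSubmit");  // resolves to object_tracker::QueueSubmit
//     PFN_vkVoidFunction fpOther =
//         vkGetDeviceProcAddr(device, "vkSomeVendorExtension");             // not intercepted: forwarded down the chain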
3930
3931VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
3932 PFN_vkVoidFunction addr;
3933 addr = InterceptCoreInstanceCommand(funcName);
3934 if (!addr) {
3935 addr = InterceptCoreDeviceCommand(funcName);
3936 }
3937 if (!addr) {
3938 addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
3939 }
3940 if (addr) {
3941 return addr;
3942 }
3943 assert(instance);
3944
3945 addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
3946 if (addr) {
3947 return addr;
3948 }
3949 addr = InterceptWsiEnabledCommand(funcName, instance);
3950 if (addr) {
3951 return addr;
3952 }
3953 if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
3954 return NULL;
3955 }
3956 return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
3957}
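// GetInstanceProcAddr tries, in order: core instance commands, core device commands (so device
// entry points can be bootstrapped through an instance handle), the WSI intercepts, the
// debug-report callback commands, and only then forwards the name to the next layer via the
// instance dispatch table. Illustrative caller-side sketch:
//
//     PFN_vkEnumeratePhysicalDevices fpEnumerate =
//         (PFN_vkEnumeratePhysicalDevices)vkGetInstanceProcAddr(instance, "vkEnumeratePhysicalDevices");
//     PFN_vkCreateDevice fpCreateDevice =
//         (PFN_vkCreateDevice)vkGetInstanceProcAddr(instance, "vkCreateDevice");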
3958
3959} // namespace object_tracker
3960
3961// vk_layer_logging.h expects these to be defined
3962VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
3963 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
3964 const VkAllocationCallbacks *pAllocator,
3965 VkDebugReportCallbackEXT *pMsgCallback) {
3966 return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
3967}
3968
3969VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
3970 const VkAllocationCallbacks *pAllocator) {
3971 object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
3972}
3973
3974VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
3975 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
3976 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
3977 object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
3978}
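// These exported vk*DebugReportCallbackEXT / vkDebugReportMessageEXT entry points are what let
// applications register VK_EXT_debug_report callbacks that receive this layer's messages.
// A minimal registration sketch (the callback function name is an app-side placeholder):
//
//     VkDebugReportCallbackCreateInfoEXT cb_ci = {};
//     cb_ci.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
//     cb_ci.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
//     cb_ci.pfnCallback = my_report_callback;  // PFN_vkDebugReportCallbackEXT provided by the application
//     VkDebugReportCallbackEXT callback;
//     vkCreateDebugReportCallbackEXT(instance, &cb_ci, nullptr, &callback);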
3979
3980// Loader-layer interface v0, just wrappers since there is only a layer
3981VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
3982 VkExtensionProperties *pProperties) {
3983 return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
3984}
3985
3986VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
3987 VkLayerProperties *pProperties) {
3988 return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
3989}
3990
3991VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
3992 VkLayerProperties *pProperties) {
3993 // The layer command handles VK_NULL_HANDLE just fine internally
3994 assert(physicalDevice == VK_NULL_HANDLE);
3995 return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
3996}
3997
3998VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
3999 return object_tracker::GetDeviceProcAddr(dev, funcName);
4000}
4001
4002VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
4003 return object_tracker::GetInstanceProcAddr(instance, funcName);
4004}
4005
4006VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
4007 const char *pLayerName, uint32_t *pCount,
4008 VkExtensionProperties *pProperties) {
4009 // The layer command handles VK_NULL_HANDLE just fine internally
4010 assert(physicalDevice == VK_NULL_HANDLE);
4011 return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
Mark Lobodzinski38080682016-07-22 15:30:27 -06004012}
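// The VK_LAYER_EXPORT wrappers above make up the v0 loader-layer interface: the loader resolves
// vkGetInstanceProcAddr / vkGetDeviceProcAddr and the enumerate entry points directly from the
// layer library and fetches everything else through them. A minimal sketch of enabling the layer
// at instance creation (the layer name shown is the one this layer was commonly distributed
// under at the time; treat it as an assumption):
//
//     const char *layers[] = {"VK_LAYER_LUNARG_object_tracker"};
//     VkInstanceCreateInfo instance_ci = {};
//     instance_ci.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
//     instance_ci.enabledLayerCount = 1;
//     instance_ci.ppEnabledLayerNames = layers;
//     vkCreateInstance(&instance_ci, nullptr, &instance);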