/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Mike Stroyan <stroyan@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 */

#include "vk_loader_platform.h"
#include "vulkan/vulkan.h"

#include <cinttypes>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unordered_map>

#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vulkan/vk_layer.h"

#include "object_tracker.h"

namespace object_tracker {

static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {

    layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
}

// Add new queue to head of global queue list
static void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    auto queueItem = device_data->queue_info_map.find(queue);
    if (queueItem == device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *p_queue_info = new OT_QUEUE_INFO;
        if (p_queue_info != NULL) {
            memset(p_queue_info, 0, sizeof(OT_QUEUE_INFO));
            p_queue_info->queue = queue;
            p_queue_info->queue_node_index = queue_node_index;
            device_data->queue_info_map[queue] = p_queue_info;
        } else {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                    reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, LayerName,
                    "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
        }
    }
}

// Destroy memRef lists and free all memory
static void DestroyQueueDataStructures(VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    for (auto queue_item : device_data->queue_info_map) {
        delete queue_item.second;
    }
    device_data->queue_info_map.clear();

    // Destroy the items in the queue map
    auto queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].begin();
    while (queue != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        uint32_t obj_index = queue->second->object_type;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[obj_index] > 0);
        device_data->num_objects[obj_index]--;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, queue->second->object_type, queue->second->handle,
                __LINE__, OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy Queue obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " Queue objs).",
                queue->second->handle, device_data->num_total_objects, device_data->num_objects[obj_index]);
        delete queue->second;
        queue = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].erase(queue);
    }
}

// Check Queue type flags for selected queue operations
static void ValidateQueueFlags(VkQueue queue, const char *function) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
    auto queue_item = device_data->queue_info_map.find(queue);
    if (queue_item != device_data->queue_info_map.end()) {
        OT_QUEUE_INFO *pQueueInfo = queue_item->second;
        if (pQueueInfo != NULL) {
            layer_data *instance_data = get_my_data_ptr(get_dispatch_key(device_data->physical_device), layer_data_map);
            if ((instance_data->queue_family_properties[pQueueInfo->queue_node_index].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ==
                0) {
                log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
                        reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, LayerName,
                        "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
            }
        }
    }
}

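// Record a command buffer allocated from the given pool; secondary-level buffers are flagged so they can be told apart later.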
static void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                                  VkDebugReportObjectTypeEXT object_type, VkCommandBufferLevel level) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<const uint64_t>(command_buffer),
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            string_VkDebugReportObjectTypeEXT(object_type), reinterpret_cast<const uint64_t>(command_buffer));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->handle = reinterpret_cast<const uint64_t>(command_buffer);
    pNewObjNode->parent_object = reinterpret_cast<const uint64_t &>(command_pool);
    if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
        pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
    } else {
        pNewObjNode->status = OBJSTATUS_NONE;
    }
    device_data->object_map[object_type][reinterpret_cast<const uint64_t>(command_buffer)] = pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

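// Verify that a command buffer is tracked and that it was allocated from the command pool it is being freed from.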
static bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t>(command_buffer);
    if (device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].find(object_handle) !=
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
        OBJTRACK_NODE *pNode =
            device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(command_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, LayerName,
                                 "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64
                                 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t>(command_buffer), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(command_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove command buffer obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

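// Record a descriptor set allocated from the given descriptor pool.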
static void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set,
                                  VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type,
            reinterpret_cast<uint64_t &>(descriptor_set), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_name[object_type],
            reinterpret_cast<uint64_t &>(descriptor_set));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(descriptor_set);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(descriptor_pool);
    device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT][reinterpret_cast<uint64_t &>(descriptor_set)] =
        pNewObjNode;
    device_data->num_objects[object_type]++;
    device_data->num_total_objects++;
}

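// Verify that a descriptor set is tracked and that it belongs to the descriptor pool it is being freed from.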
static bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    bool skip_call = false;
    uint64_t object_handle = reinterpret_cast<uint64_t &>(descriptor_set);
    auto dsItem = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].find(object_handle);
    if (dsItem != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
        OBJTRACK_NODE *pNode = dsItem->second;

        if (pNode->parent_object != reinterpret_cast<uint64_t &>(descriptor_pool)) {
            skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, object_handle,
                                 __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, LayerName,
                                 "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64
                                 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
                                 reinterpret_cast<uint64_t &>(descriptor_set), pNode->parent_object,
                                 reinterpret_cast<uint64_t &>(descriptor_pool));
        }
    } else {
        skip_call |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle,
                             __LINE__, OBJTRACK_NONE, LayerName, "Unable to remove descriptor set obj 0x%" PRIxLEAST64
                             ". Was it created? Has it already been destroyed?",
                             object_handle);
    }
    return skip_call;
}

static void CreateQueue(VkDevice device, VkQueue vkObj, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, reinterpret_cast<uint64_t>(vkObj), __LINE__,
            OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], reinterpret_cast<uint64_t>(vkObj));

    OBJTRACK_NODE *p_obj_node = NULL;
    auto queue_item = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].find(reinterpret_cast<uint64_t>(vkObj));
    if (queue_item == device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT].end()) {
        p_obj_node = new OBJTRACK_NODE;
        device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT][reinterpret_cast<uint64_t>(vkObj)] = p_obj_node;
        device_data->num_objects[object_type]++;
        device_data->num_total_objects++;
    } else {
        p_obj_node = queue_item->second;
    }
    p_obj_node->object_type = object_type;
    p_obj_node->status = OBJSTATUS_NONE;
    p_obj_node->handle = reinterpret_cast<uint64_t>(vkObj);
}

static void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
            reinterpret_cast<uint64_t &>(swapchain_image), __LINE__, OBJTRACK_NONE, LayerName,
            "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, "SwapchainImage",
            reinterpret_cast<uint64_t &>(swapchain_image));

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
    pNewObjNode->status = OBJSTATUS_NONE;
    pNewObjNode->handle = reinterpret_cast<uint64_t &>(swapchain_image);
    pNewObjNode->parent_object = reinterpret_cast<uint64_t &>(swapchain);
    device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
}

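// Overloads to extract a uint64_t map key from both non-dispatchable handles and dispatchable (pointer) handles.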
template<typename T>
uint64_t handle_value(T handle) {
    return reinterpret_cast<uint64_t &>(handle);
}
template<typename T>
uint64_t handle_value(T *handle) {
    return reinterpret_cast<uint64_t>(handle);
}

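// Add a new object to the tracking map, bump the per-type and total counts, and note whether a custom allocator was used.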
template <typename T1, typename T2>
static void CreateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, const VkAllocationCallbacks *pAllocator) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    auto object_handle = handle_value(object);
    bool custom_allocator = pAllocator != nullptr;

    log_msg(instance_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, object_type, object_handle,
            __LINE__, OBJTRACK_NONE, LayerName, "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++,
            object_name[object_type], object_handle);

    OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
    pNewObjNode->object_type = object_type;
    pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
    pNewObjNode->handle = object_handle;
    instance_data->object_map[object_type][object_handle] = pNewObjNode;
    instance_data->num_objects[object_type]++;
    instance_data->num_total_objects++;
}

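// Remove an object from the tracking map, decrement the counts, and warn on an allocator mismatch or an untracked handle.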
template <typename T1, typename T2>
static void DestroyObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, const VkAllocationCallbacks *pAllocator) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);

    auto object_handle = handle_value(object);
    bool custom_allocator = pAllocator != nullptr;

    auto item = device_data->object_map[object_type].find(object_handle);
    if (item != device_data->object_map[object_type].end()) {

        OBJTRACK_NODE *pNode = item->second;
        assert(device_data->num_total_objects > 0);
        device_data->num_total_objects--;
        assert(device_data->num_objects[pNode->object_type] > 0);
        device_data->num_objects[pNode->object_type]--;

        log_msg(device_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, pNode->object_type, object_handle, __LINE__,
                OBJTRACK_NONE, LayerName,
                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                object_name[pNode->object_type], reinterpret_cast<uint64_t &>(object), device_data->num_total_objects,
                device_data->num_objects[pNode->object_type], object_name[pNode->object_type]);

        auto allocated_with_custom = (pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
        if (custom_allocator ^ allocated_with_custom) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, __LINE__,
                    OBJTRACK_ALLOCATOR_MISMATCH, LayerName,
                    "Custom allocator %sspecified while destroying %s obj 0x%" PRIxLEAST64 " but %sspecified at creation",
                    (custom_allocator ? "" : "not "), object_name[object_type], object_handle,
                    (allocated_with_custom ? "" : "not "));
        }

        delete pNode;
        device_data->object_map[object_type].erase(item);
    } else {
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
                OBJTRACK_UNKNOWN_OBJECT, LayerName,
                "Unable to remove %s obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
                object_name[object_type], object_handle);
    }
}

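// Return true (skip the call) if the handle is not tracked; images may instead appear in the swapchain image map.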
template <typename T1, typename T2>
static bool ValidateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type, bool null_allowed) {
    if (null_allowed && (object == VK_NULL_HANDLE)) {
        return false;
    }
    auto object_handle = handle_value(object);

    layer_data *device_data = get_my_data_ptr(get_dispatch_key(dispatchable_object), layer_data_map);
    if (device_data->object_map[object_type].find(object_handle) == device_data->object_map[object_type].end()) {
        // If object is an image, also look for it in the swapchain image map
        if ((object_type != VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT) ||
            (device_data->swapchainImageMap.find(object_handle) == device_data->swapchainImageMap.end())) {
            return log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type,
                           object_handle, __LINE__, OBJTRACK_INVALID_OBJECT, LayerName,
                           "Invalid %s Object 0x%" PRIxLEAST64, object_name[object_type], object_handle);
        }
    }
    return false;
}

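// Report and remove every leaked object of the given type still associated with this device.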
static void DeviceReportUndestroyedObjects(VkDevice device, VkDebugReportObjectTypeEXT object_type) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    for (auto item = device_data->object_map[object_type].begin(); item != device_data->object_map[object_type].end();) {
        OBJTRACK_NODE *object_info = item->second;
        log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_info->object_type, object_info->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName,
                "OBJ ERROR : For device 0x%" PRIxLEAST64 ", %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                reinterpret_cast<uint64_t>(device), object_name[object_type], object_info->handle);
        item = device_data->object_map[object_type].erase(item);
    }
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);

    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);

    // Enable the temporary callback(s) here to catch cleanup issues:
    bool callback_setup = false;
    if (instance_data->num_tmp_callbacks > 0) {
        if (!layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                        instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
            callback_setup = true;
        }
    }

    ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);

    DestroyObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
    // Report any remaining objects in LL

    for (auto iit = instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].begin();
         iit != instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].end();) {
        OBJTRACK_NODE *pNode = iit->second;

        VkDevice device = reinterpret_cast<VkDevice>(pNode->handle);

        log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->object_type, pNode->handle, __LINE__,
                OBJTRACK_OBJECT_LEAK, LayerName, "OBJ ERROR : %s object 0x%" PRIxLEAST64 " has not been destroyed.",
                string_VkDebugReportObjectTypeEXT(pNode->object_type), pNode->handle);
        // Semaphore:
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
        DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
        // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);
    }
    instance_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT].clear();

    VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
    pInstanceTable->DestroyInstance(instance, pAllocator);

    // Disable and cleanup the temporary callback(s):
    if (callback_setup) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
    }
    if (instance_data->num_tmp_callbacks > 0) {
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);

    instanceExtMap.erase(pInstanceTable);
    lock.unlock();
    ot_instance_table_map.erase(key);
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {

    std::unique_lock<std::mutex> lock(global_lock);
    ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    DestroyObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);

    // Report any remaining objects associated with this VkDevice object in LL
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT);
    DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
    // DeviceReportUndestroyedObjects(device, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT);

    // Clean up Queue's MemRef Linked Lists
    DestroyQueueDataStructures(device);

    lock.unlock();

    dispatch_key key = get_dispatch_key(device);
    VkLayerDispatchTable *pDisp = get_dispatch_table(ot_device_table_map, device);
    pDisp->DestroyDevice(device, pAllocator);
    ot_device_table_map.erase(key);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                             VkFormatProperties *pFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                      VkImageType type, VkImageTiling tiling,
                                                                      VkImageUsageFlags usage, VkImageCreateFlags flags,
                                                                      VkImageFormatProperties *pImageFormatProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_instance_table_map, physicalDevice)
            ->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                                             VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *pName);

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *pName);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount,
                                                                    VkExtensionProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                              VkLayerProperties *pProperties);

VKAPI_ATTR VkResult VKAPI_CALL QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(queue, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
        if (pSubmits) {
            for (uint32_t idx0 = 0; idx0 < submitCount; ++idx0) {
                if (pSubmits[idx0].pCommandBuffers) {
                    for (uint32_t idx1 = 0; idx1 < pSubmits[idx0].commandBufferCount; ++idx1) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pCommandBuffers[idx1],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
                    }
                }
                if (pSubmits[idx0].pSignalSemaphores) {
                    for (uint32_t idx2 = 0; idx2 < pSubmits[idx0].signalSemaphoreCount; ++idx2) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pSignalSemaphores[idx2],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
                if (pSubmits[idx0].pWaitSemaphores) {
                    for (uint32_t idx3 = 0; idx3 < pSubmits[idx0].waitSemaphoreCount; ++idx3) {
                        skip_call |= ValidateObject(queue, pSubmits[idx0].pWaitSemaphores[idx3],
                                                    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
                    }
                }
            }
        }
        if (queue) {
            skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL QueueWaitIdle(VkQueue queue) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueWaitIdle(queue);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DeviceWaitIdle(VkDevice device) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->DeviceWaitIdle(device);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pMemory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL FlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                       const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateObject(device, pMemoryRanges[idx0].memory,
                                                VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL InvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
                                                            const VkMappedMemoryRange *pMemoryRanges) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pMemoryRanges) {
            for (uint32_t idx0 = 0; idx0 < memoryRangeCount; ++idx0) {
                if (pMemoryRanges[idx0].memory) {
                    skip_call |= ValidateObject(device, pMemoryRanges[idx0].memory,
                                                VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
                }
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result =
        get_dispatch_table(ot_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
                                                     VkDeviceSize *pCommittedMemoryInBytes) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}

VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
                                                VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindBufferMemory(device, buffer, memory, memoryOffset);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
        skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->BindImageMemory(device, image, memory, memoryOffset);
    return result;
}

VKAPI_ATTR void VKAPI_CALL GetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
                                                       VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
                                                            VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_device_table_map, device)
        ->GetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}

VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format,
                                                                        VkImageType type, VkSampleCountFlagBits samples,
                                                                        VkImageUsageFlags usage, VkImageTiling tiling,
                                                                        uint32_t *pPropertyCount,
                                                                        VkSparseImageFormatProperties *pProperties) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |=
            ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    get_dispatch_table(ot_instance_table_map, physicalDevice)
        ->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount,
                                                       pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pFence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyFence(device, fence, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL ResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetFences(device, fenceCount, pFences);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetFenceStatus(VkDevice device, VkFence fence) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetFenceStatus(device, fence);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll,
                                             uint64_t timeout) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pFences) {
            for (uint32_t idx0 = 0; idx0 < fenceCount; ++idx0) {
                skip_call |= ValidateObject(device, pFences[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, false);
            }
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pSemaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroySemaphore(device, semaphore, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pEvent, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyEvent(device, event, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetEventStatus(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->GetEventStatus(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL SetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->SetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL ResetEvent(VkDevice device, VkEvent event) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetEvent(device, event);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
                                               const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pQueryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyQueryPool(device, queryPool, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
                                                   size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        skip_call |= ValidateObject(device, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)
                          ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBuffer(device, buffer, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator, VkBufferView *pView) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
        if (pCreateInfo) {
            skip_call |= ValidateObject(device, pCreateInfo->buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
        }
    }
    if (skip_call) {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (result == VK_SUCCESS) {
            CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
        }
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, false);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(global_lock);
        DestroyObject(device, bufferView, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, pAllocator);
    }
    get_dispatch_table(ot_device_table_map, device)->DestroyBufferView(device, bufferView, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
                                           const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
    bool skip_call = false;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
    }
    if (skip_call) {
1136 return VK_ERROR_VALIDATION_FAILED_EXT;
1137 }
1138 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
1139 {
1140 std::lock_guard<std::mutex> lock(global_lock);
1141 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001142 CreateObject(device, *pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001143 }
1144 }
1145 return result;
1146}
1147
1148VKAPI_ATTR void VKAPI_CALL DestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
1149 bool skip_call = false;
1150 {
1151 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001152 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1153 skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001154 }
1155 if (skip_call) {
1156 return;
1157 }
1158 {
1159 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001160 DestroyObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001161 }
1162 get_dispatch_table(ot_device_table_map, device)->DestroyImage(device, image, pAllocator);
1163}
1164
1165VKAPI_ATTR void VKAPI_CALL GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource,
1166 VkSubresourceLayout *pLayout) {
1167 bool skip_call = false;
1168 {
1169 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001170 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1171 skip_call |= ValidateObject(device, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001172 }
1173 if (skip_call) {
1174 return;
1175 }
1176 get_dispatch_table(ot_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1177}
1178
1179VKAPI_ATTR VkResult VKAPI_CALL CreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1180 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1181 bool skip_call = false;
1182 {
1183 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001184 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001185 if (pCreateInfo) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001186 skip_call |= ValidateObject(device, pCreateInfo->image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001187 }
1188 }
1189 if (skip_call) {
1190 return VK_ERROR_VALIDATION_FAILED_EXT;
1191 }
1192 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
1193 {
1194 std::lock_guard<std::mutex> lock(global_lock);
1195 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001196 CreateObject(device, *pView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001197 }
1198 }
1199 return result;
1200}
1201
1202VKAPI_ATTR void VKAPI_CALL DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
1203 bool skip_call = false;
1204 {
1205 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001206 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1207 skip_call |= ValidateObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001208 }
1209 if (skip_call) {
1210 return;
1211 }
1212 {
1213 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001214 DestroyObject(device, imageView, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001215 }
1216 get_dispatch_table(ot_device_table_map, device)->DestroyImageView(device, imageView, pAllocator);
1217}
1218
1219VKAPI_ATTR VkResult VKAPI_CALL CreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo,
1220 const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule) {
1221 bool skip_call = false;
1222 {
1223 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001224 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001225 }
1226 if (skip_call) {
1227 return VK_ERROR_VALIDATION_FAILED_EXT;
1228 }
1229 VkResult result =
1230 get_dispatch_table(ot_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
1231 {
1232 std::lock_guard<std::mutex> lock(global_lock);
1233 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001234 CreateObject(device, *pShaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001235 }
1236 }
1237 return result;
1238}
1239
1240VKAPI_ATTR void VKAPI_CALL DestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1241 const VkAllocationCallbacks *pAllocator) {
1242 bool skip_call = false;
1243 {
1244 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001245 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1246 skip_call |= ValidateObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001247 }
1248 if (skip_call) {
1249 return;
1250 }
1251 {
1252 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001253 DestroyObject(device, shaderModule, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001254 }
1255 get_dispatch_table(ot_device_table_map, device)->DestroyShaderModule(device, shaderModule, pAllocator);
1256}
1257
1258VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo,
1259 const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache) {
1260 bool skip_call = false;
1261 {
1262 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001263 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001264 }
1265 if (skip_call) {
1266 return VK_ERROR_VALIDATION_FAILED_EXT;
1267 }
1268 VkResult result =
1269 get_dispatch_table(ot_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
1270 {
1271 std::lock_guard<std::mutex> lock(global_lock);
1272 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001273 CreateObject(device, *pPipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001274 }
1275 }
1276 return result;
1277}
1278
1279VKAPI_ATTR void VKAPI_CALL DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1280 const VkAllocationCallbacks *pAllocator) {
1281 bool skip_call = false;
1282 {
1283 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001284 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1285 skip_call |= ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001286 }
1287 if (skip_call) {
1288 return;
1289 }
1290 {
1291 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001292 DestroyObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001293 }
1294 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineCache(device, pipelineCache, pAllocator);
1295}
1296
1297VKAPI_ATTR VkResult VKAPI_CALL GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize,
1298 void *pData) {
1299 bool skip_call = false;
1300 {
1301 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001302 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1303 skip_call |= ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001304 }
1305 if (skip_call) {
1306 return VK_ERROR_VALIDATION_FAILED_EXT;
1307 }
1308 VkResult result =
1309 get_dispatch_table(ot_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1310 return result;
1311}
1312
1313VKAPI_ATTR VkResult VKAPI_CALL MergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount,
1314 const VkPipelineCache *pSrcCaches) {
1315 bool skip_call = false;
1316 {
1317 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001318 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1319 skip_call |= ValidateObject(device, dstCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001320 if (pSrcCaches) {
1321 for (uint32_t idx0 = 0; idx0 < srcCacheCount; ++idx0) {
1322 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001323 ValidateObject(device, pSrcCaches[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001324 }
1325 }
1326 }
1327 if (skip_call) {
1328 return VK_ERROR_VALIDATION_FAILED_EXT;
1329 }
1330 VkResult result =
1331 get_dispatch_table(ot_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
1332 return result;
1333}
1334
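// Hooks that take arrays of handles (pSrcCaches above, and parameters such as pSetLayouts,
// pDescriptorSets, and pBuffers elsewhere in this file) validate every element individually, so a
// single stale or foreign handle anywhere in the array fails the whole call.
//
// Sketch of the misuse this flags (hypothetical handles):
//     VkPipelineCache caches[2] = { validCache, destroyedCache };
//     vkMergePipelineCaches(device, dstCache, 2, caches);   // element 1 is untracked ->
//                                                           // VK_ERROR_VALIDATION_FAILED_EXT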
1335VKAPI_ATTR void VKAPI_CALL DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
1336 bool skip_call = false;
1337 {
1338 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001339 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1340 skip_call |= ValidateObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001341 }
1342 if (skip_call) {
1343 return;
1344 }
1345 {
1346 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001347 DestroyObject(device, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001348 }
1349 get_dispatch_table(ot_device_table_map, device)->DestroyPipeline(device, pipeline, pAllocator);
1350}
1351
1352VKAPI_ATTR VkResult VKAPI_CALL CreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo,
1353 const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout) {
1354 bool skip_call = false;
1355 {
1356 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001357 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001358 if (pCreateInfo) {
1359 if (pCreateInfo->pSetLayouts) {
1360 for (uint32_t idx0 = 0; idx0 < pCreateInfo->setLayoutCount; ++idx0) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001361 skip_call |= ValidateObject(device, pCreateInfo->pSetLayouts[idx0],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001362 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
1363 }
1364 }
1365 }
1366 }
1367 if (skip_call) {
1368 return VK_ERROR_VALIDATION_FAILED_EXT;
1369 }
1370 VkResult result =
1371 get_dispatch_table(ot_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
1372 {
1373 std::lock_guard<std::mutex> lock(global_lock);
1374 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001375 CreateObject(device, *pPipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001376 }
1377 }
1378 return result;
1379}
1380
1381VKAPI_ATTR void VKAPI_CALL DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1382 const VkAllocationCallbacks *pAllocator) {
1383 bool skip_call = false;
1384 {
1385 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001386 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1387 skip_call |= ValidateObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001388 }
1389 if (skip_call) {
1390 return;
1391 }
1392 {
1393 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001394 DestroyObject(device, pipelineLayout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001395 }
1396 get_dispatch_table(ot_device_table_map, device)->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1397}
1398
1399VKAPI_ATTR VkResult VKAPI_CALL CreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1400 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1401 bool skip_call = false;
1402 {
1403 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001404 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001405 }
1406 if (skip_call) {
1407 return VK_ERROR_VALIDATION_FAILED_EXT;
1408 }
1409 VkResult result = get_dispatch_table(ot_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1410 {
1411 std::lock_guard<std::mutex> lock(global_lock);
1412 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001413 CreateObject(device, *pSampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001414 }
1415 }
1416 return result;
1417}
1418
1419VKAPI_ATTR void VKAPI_CALL DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
1420 bool skip_call = false;
1421 {
1422 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001423 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1424 skip_call |= ValidateObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001425 }
1426 if (skip_call) {
1427 return;
1428 }
1429 {
1430 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001431 DestroyObject(device, sampler, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001432 }
1433 get_dispatch_table(ot_device_table_map, device)->DestroySampler(device, sampler, pAllocator);
1434}
1435
1436VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1437 const VkAllocationCallbacks *pAllocator,
1438 VkDescriptorSetLayout *pSetLayout) {
1439 bool skip_call = false;
1440 {
1441 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001442 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001443 if (pCreateInfo) {
1444 if (pCreateInfo->pBindings) {
1445 for (uint32_t idx0 = 0; idx0 < pCreateInfo->bindingCount; ++idx0) {
1446 if (pCreateInfo->pBindings[idx0].pImmutableSamplers) {
1447 for (uint32_t idx1 = 0; idx1 < pCreateInfo->pBindings[idx0].descriptorCount; ++idx1) {
1448 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001449 ValidateObject(device, pCreateInfo->pBindings[idx0].pImmutableSamplers[idx1],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001450 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1451 }
1452 }
1453 }
1454 }
1455 }
1456 }
1457 if (skip_call) {
1458 return VK_ERROR_VALIDATION_FAILED_EXT;
1459 }
1460 VkResult result =
1461 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
1462 {
1463 std::lock_guard<std::mutex> lock(global_lock);
1464 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001465 CreateObject(device, *pSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001466 }
1467 }
1468 return result;
1469}
1470
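// CreateDescriptorSetLayout above descends into the create info: for every binding that supplies
// pImmutableSamplers, each of its descriptorCount sampler handles is validated before the layout
// is created, so a stale immutable sampler is reported at layout-creation time rather than later
// at bind or draw time.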
1471VKAPI_ATTR void VKAPI_CALL DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
1472 const VkAllocationCallbacks *pAllocator) {
1473 bool skip_call = false;
1474 {
1475 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001476 skip_call |= ValidateObject(device, descriptorSetLayout,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001477 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
Chris Forbes2f271a72016-09-29 14:58:08 +13001478 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001479 }
1480 if (skip_call) {
1481 return;
1482 }
1483 {
1484 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001485 DestroyObject(device, descriptorSetLayout, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001486 }
1487 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1488}
1489
1490VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo,
1491 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool) {
1492 bool skip_call = false;
1493 {
1494 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001495 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001496 }
1497 if (skip_call) {
1498 return VK_ERROR_VALIDATION_FAILED_EXT;
1499 }
1500 VkResult result =
1501 get_dispatch_table(ot_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
1502 {
1503 std::lock_guard<std::mutex> lock(global_lock);
1504 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001505 CreateObject(device, *pDescriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001506 }
1507 }
1508 return result;
1509}
1510
1511VKAPI_ATTR VkResult VKAPI_CALL ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1512 VkDescriptorPoolResetFlags flags) {
1513 bool skip_call = false;
Chris Forbes2a947ce2016-09-29 18:47:50 +13001514 std::unique_lock<std::mutex> lock(global_lock);
1515 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
1516 skip_call |= ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
1517 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001518 if (skip_call) {
1519 return VK_ERROR_VALIDATION_FAILED_EXT;
1520 }
Chris Forbes2a947ce2016-09-29 18:47:50 +13001521 // Resetting a descriptor pool implicitly frees every descriptor set allocated from it;
1522 // remove this pool's descriptor sets from the layer's descriptor-set tracking map.
1523 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
1524 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
1525 OBJTRACK_NODE *pNode = (*itr).second;
1526 auto del_itr = itr++;
1527 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
1528 DestroyObject(device, (VkDescriptorSet)((*del_itr).first),
Mark Lobodzinski4dc768c2016-10-03 16:01:12 -06001529 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
Chris Forbes2a947ce2016-09-29 18:47:50 +13001530 }
1531 }
1532 lock.unlock();
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001533 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
1534 return result;
1535}
1536
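// ResetDescriptorPool above is one of the few hooks that must fix up tracking for objects it was
// not passed directly: because resetting the pool implicitly frees every set allocated from it,
// the loop walks the descriptor-set map and untracks each set whose parent_object is the pool
// being reset. DestroyDescriptorPool presumably needs the same sweep (not shown in this section).
//
// Example of the stale-handle use this lets the layer catch (hypothetical handles):
//     vkResetDescriptorPool(device, pool, 0);
//     vkUpdateDescriptorSets(device, 1, &writeReferencingOldSet, 0, nullptr);  // dstSet was freed
//                                                                              // by the reset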
1537VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1538 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
1539 const VkCopyDescriptorSet *pDescriptorCopies) {
1540 bool skip_call = false;
1541 {
1542 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001543 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001544 if (pDescriptorCopies) {
1545 for (uint32_t idx0 = 0; idx0 < descriptorCopyCount; ++idx0) {
1546 if (pDescriptorCopies[idx0].dstSet) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001547 skip_call |= ValidateObject(device, pDescriptorCopies[idx0].dstSet,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001548 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1549 }
1550 if (pDescriptorCopies[idx0].srcSet) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001551 skip_call |= ValidateObject(device, pDescriptorCopies[idx0].srcSet,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001552 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1553 }
1554 }
1555 }
1556 if (pDescriptorWrites) {
1557 for (uint32_t idx1 = 0; idx1 < descriptorWriteCount; ++idx1) {
1558 if (pDescriptorWrites[idx1].dstSet) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001559 skip_call |= ValidateObject(device, pDescriptorWrites[idx1].dstSet,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001560 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1561 }
1562 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
1563 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
1564 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
1565 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
1566 for (uint32_t idx2 = 0; idx2 < pDescriptorWrites[idx1].descriptorCount; ++idx2) {
1567 if (pDescriptorWrites[idx1].pBufferInfo[idx2].buffer) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001568 skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pBufferInfo[idx2].buffer,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001569 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
1570 }
1571 }
1572 }
1573 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
1574 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
1575 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) ||
1576 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
1577 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)) {
1578 for (uint32_t idx3 = 0; idx3 < pDescriptorWrites[idx1].descriptorCount; ++idx3) {
1579 if (pDescriptorWrites[idx1].pImageInfo[idx3].imageView) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001580 skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].imageView,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001581 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1582 }
1583 if (pDescriptorWrites[idx1].pImageInfo[idx3].sampler) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001584 skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pImageInfo[idx3].sampler,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001585 VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, false);
1586 }
1587 }
1588 }
1589 if ((pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
1590 (pDescriptorWrites[idx1].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)) {
1591 for (uint32_t idx4 = 0; idx4 < pDescriptorWrites[idx1].descriptorCount; ++idx4) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001592 skip_call |= ValidateObject(device, pDescriptorWrites[idx1].pTexelBufferView[idx4],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001593 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, true);
1594 }
1595 }
1596 }
1597 }
1598 }
1599 if (skip_call) {
1600 return;
1601 }
1602 get_dispatch_table(ot_device_table_map, device)
1603 ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
1604}
1605
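// UpdateDescriptorSets above validates the contents of each write according to its descriptor
// type: buffer descriptors check pBufferInfo[].buffer, image and sampler descriptors check
// pImageInfo[].imageView and .sampler, and texel-buffer descriptors check pTexelBufferView[]
// (note the final 'true' argument there, which treats VK_NULL_HANDLE as acceptable). dstSet and
// srcSet handles are validated for every write and copy that supplies them.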
1606VKAPI_ATTR VkResult VKAPI_CALL CreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1607 const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer) {
1608 bool skip_call = false;
1609 {
1610 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001611 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001612 if (pCreateInfo) {
1613 if (pCreateInfo->pAttachments) {
1614 for (uint32_t idx0 = 0; idx0 < pCreateInfo->attachmentCount; ++idx0) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001615 skip_call |= ValidateObject(device, pCreateInfo->pAttachments[idx0],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001616 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, false);
1617 }
1618 }
1619 if (pCreateInfo->renderPass) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001620 skip_call |= ValidateObject(device, pCreateInfo->renderPass,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001621 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
1622 }
1623 }
1624 }
1625 if (skip_call) {
1626 return VK_ERROR_VALIDATION_FAILED_EXT;
1627 }
1628 VkResult result =
1629 get_dispatch_table(ot_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
1630 {
1631 std::lock_guard<std::mutex> lock(global_lock);
1632 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001633 CreateObject(device, *pFramebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001634 }
1635 }
1636 return result;
1637}
1638
1639VKAPI_ATTR void VKAPI_CALL DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) {
1640 bool skip_call = false;
1641 {
1642 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001643 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1644 skip_call |= ValidateObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001645 }
1646 if (skip_call) {
1647 return;
1648 }
1649 {
1650 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001651 DestroyObject(device, framebuffer, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001652 }
1653 get_dispatch_table(ot_device_table_map, device)->DestroyFramebuffer(device, framebuffer, pAllocator);
1654}
1655
1656VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1657 const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
1658 bool skip_call = false;
1659 {
1660 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001661 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001662 }
1663 if (skip_call) {
1664 return VK_ERROR_VALIDATION_FAILED_EXT;
1665 }
1666 VkResult result =
1667 get_dispatch_table(ot_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1668 {
1669 std::lock_guard<std::mutex> lock(global_lock);
1670 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001671 CreateObject(device, *pRenderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001672 }
1673 }
1674 return result;
1675}
1676
1677VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
1678 bool skip_call = false;
1679 {
1680 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001681 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1682 skip_call |= ValidateObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001683 }
1684 if (skip_call) {
1685 return;
1686 }
1687 {
1688 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbesec461992016-09-29 14:41:44 +13001689 DestroyObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001690 }
1691 get_dispatch_table(ot_device_table_map, device)->DestroyRenderPass(device, renderPass, pAllocator);
1692}
1693
1694VKAPI_ATTR void VKAPI_CALL GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) {
1695 bool skip_call = false;
1696 {
1697 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001698 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
1699 skip_call |= ValidateObject(device, renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001700 }
1701 if (skip_call) {
1702 return;
1703 }
1704 get_dispatch_table(ot_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
1705}
1706
1707VKAPI_ATTR VkResult VKAPI_CALL CreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1708 const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool) {
1709 bool skip_call = false;
1710 {
1711 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001712 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001713 }
1714 if (skip_call) {
1715 return VK_ERROR_VALIDATION_FAILED_EXT;
1716 }
1717 VkResult result =
1718 get_dispatch_table(ot_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
1719 {
1720 std::lock_guard<std::mutex> lock(global_lock);
1721 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13001722 CreateObject(device, *pCommandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001723 }
1724 }
1725 return result;
1726}
1727
1728VKAPI_ATTR VkResult VKAPI_CALL ResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
1729 bool skip_call = false;
1730 {
1731 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001732 skip_call |= ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
1733 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001734 }
1735 if (skip_call) {
1736 return VK_ERROR_VALIDATION_FAILED_EXT;
1737 }
1738 VkResult result = get_dispatch_table(ot_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
1739 return result;
1740}
1741
1742VKAPI_ATTR VkResult VKAPI_CALL BeginCommandBuffer(VkCommandBuffer command_buffer, const VkCommandBufferBeginInfo *begin_info) {
1743 layer_data *device_data = get_my_data_ptr(get_dispatch_key(command_buffer), layer_data_map);
1744 bool skip_call = false;
1745 {
1746 std::lock_guard<std::mutex> lock(global_lock);
1747 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001748 ValidateObject(command_buffer, command_buffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001749 if (begin_info) {
1750 OBJTRACK_NODE *pNode =
1751 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT][reinterpret_cast<uint64_t>(command_buffer)];
1752 if (pNode && (begin_info->pInheritanceInfo) && (pNode->status & OBJSTATUS_COMMAND_BUFFER_SECONDARY)) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001753 skip_call |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->framebuffer,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001754 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, true);
Chris Forbes2f271a72016-09-29 14:58:08 +13001755 skip_call |= ValidateObject(command_buffer, begin_info->pInheritanceInfo->renderPass,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001756 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, true);
1757 }
1758 }
1759 }
1760 if (skip_call) {
1761 return VK_ERROR_VALIDATION_FAILED_EXT;
1762 }
1763 VkResult result = get_dispatch_table(ot_device_table_map, command_buffer)->BeginCommandBuffer(command_buffer, begin_info);
1764 return result;
1765}
1766
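// BeginCommandBuffer above adds one wrinkle to the usual command-buffer validation: if the
// tracked node was marked OBJSTATUS_COMMAND_BUFFER_SECONDARY (presumably when the command buffer
// was allocated) and the begin info carries pInheritanceInfo, the inherited framebuffer and
// renderPass are validated as well, with VK_NULL_HANDLE permitted for both (the trailing 'true'
// arguments), since the spec allows them to be null or ignored in some cases.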
1767VKAPI_ATTR VkResult VKAPI_CALL EndCommandBuffer(VkCommandBuffer commandBuffer) {
1768 bool skip_call = false;
1769 {
1770 std::lock_guard<std::mutex> lock(global_lock);
1771 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001772 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001773 }
1774 if (skip_call) {
1775 return VK_ERROR_VALIDATION_FAILED_EXT;
1776 }
1777 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
1778 return result;
1779}
1780
1781VKAPI_ATTR VkResult VKAPI_CALL ResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) {
1782 bool skip_call = false;
1783 {
1784 std::lock_guard<std::mutex> lock(global_lock);
1785 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001786 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001787 }
1788 if (skip_call) {
1789 return VK_ERROR_VALIDATION_FAILED_EXT;
1790 }
1791 VkResult result = get_dispatch_table(ot_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
1792 return result;
1793}
1794
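// The vkCmd* recording hooks that follow share a simpler shape: validate the command buffer plus
// any handles being bound or referenced, and if anything is invalid, skip recording the command
// entirely rather than call down. Nothing is added to or removed from the tracking maps, since
// recording commands does not create or destroy Vulkan objects.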
1795VKAPI_ATTR void VKAPI_CALL CmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1796 VkPipeline pipeline) {
1797 bool skip_call = false;
1798 {
1799 std::lock_guard<std::mutex> lock(global_lock);
1800 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001801 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1802 skip_call |= ValidateObject(commandBuffer, pipeline, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001803 }
1804 if (skip_call) {
1805 return;
1806 }
1807 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1808}
1809
1810VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
1811 const VkViewport *pViewports) {
1812 bool skip_call = false;
1813 {
1814 std::lock_guard<std::mutex> lock(global_lock);
1815 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001816 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001817 }
1818 if (skip_call) {
1819 return;
1820 }
1821 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
1822}
1823
1824VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
1825 const VkRect2D *pScissors) {
1826 bool skip_call = false;
1827 {
1828 std::lock_guard<std::mutex> lock(global_lock);
1829 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001830 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001831 }
1832 if (skip_call) {
1833 return;
1834 }
1835 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1836}
1837
1838VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1839 bool skip_call = false;
1840 {
1841 std::lock_guard<std::mutex> lock(global_lock);
1842 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001843 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001844 }
1845 if (skip_call) {
1846 return;
1847 }
1848 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
1849}
1850
1851VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
1852 float depthBiasSlopeFactor) {
1853 bool skip_call = false;
1854 {
1855 std::lock_guard<std::mutex> lock(global_lock);
1856 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001857 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001858 }
1859 if (skip_call) {
1860 return;
1861 }
1862 get_dispatch_table(ot_device_table_map, commandBuffer)
1863 ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
1864}
1865
1866VKAPI_ATTR void VKAPI_CALL CmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) {
1867 bool skip_call = false;
1868 {
1869 std::lock_guard<std::mutex> lock(global_lock);
1870 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001871 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001872 }
1873 if (skip_call) {
1874 return;
1875 }
1876 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
1877}
1878
1879VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
1880 bool skip_call = false;
1881 {
1882 std::lock_guard<std::mutex> lock(global_lock);
1883 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001884 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001885 }
1886 if (skip_call) {
1887 return;
1888 }
1889 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1890}
1891
1892VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
1893 uint32_t compareMask) {
1894 bool skip_call = false;
1895 {
1896 std::lock_guard<std::mutex> lock(global_lock);
1897 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001898 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001899 }
1900 if (skip_call) {
1901 return;
1902 }
1903 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1904}
1905
1906VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
1907 bool skip_call = false;
1908 {
1909 std::lock_guard<std::mutex> lock(global_lock);
1910 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001911 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001912 }
1913 if (skip_call) {
1914 return;
1915 }
1916 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1917}
1918
1919VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
1920 bool skip_call = false;
1921 {
1922 std::lock_guard<std::mutex> lock(global_lock);
1923 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001924 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001925 }
1926 if (skip_call) {
1927 return;
1928 }
1929 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
1930}
1931
1932VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1933 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1934 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1935 const uint32_t *pDynamicOffsets) {
1936 bool skip_call = false;
1937 {
1938 std::lock_guard<std::mutex> lock(global_lock);
1939 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001940 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
1941 skip_call |= ValidateObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001942 if (pDescriptorSets) {
1943 for (uint32_t idx0 = 0; idx0 < descriptorSetCount; ++idx0) {
Chris Forbes2f271a72016-09-29 14:58:08 +13001944 skip_call |= ValidateObject(commandBuffer, pDescriptorSets[idx0],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001945 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, false);
1946 }
1947 }
1948 }
1949 if (skip_call) {
1950 return;
1951 }
1952 get_dispatch_table(ot_device_table_map, commandBuffer)
1953 ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets,
1954 dynamicOffsetCount, pDynamicOffsets);
1955}
1956
1957VKAPI_ATTR void VKAPI_CALL CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
1958 VkIndexType indexType) {
1959 bool skip_call = false;
1960 {
1961 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13001962 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001963 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001964 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001965 }
1966 if (skip_call) {
1967 return;
1968 }
1969 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
1970}
1971
1972VKAPI_ATTR void VKAPI_CALL CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount,
1973 const VkBuffer *pBuffers, const VkDeviceSize *pOffsets) {
1974 bool skip_call = false;
1975 {
1976 std::lock_guard<std::mutex> lock(global_lock);
1977 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001978 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001979 if (pBuffers) {
1980 for (uint32_t idx0 = 0; idx0 < bindingCount; ++idx0) {
1981 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001982 ValidateObject(commandBuffer, pBuffers[idx0], VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06001983 }
1984 }
1985 }
1986 if (skip_call) {
1987 return;
1988 }
1989 get_dispatch_table(ot_device_table_map, commandBuffer)
1990 ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
1991}
1992
1993VKAPI_ATTR void VKAPI_CALL CmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount,
1994 uint32_t firstVertex, uint32_t firstInstance) {
1995 bool skip_call = false;
1996 {
1997 std::lock_guard<std::mutex> lock(global_lock);
1998 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13001999 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002000 }
2001 if (skip_call) {
2002 return;
2003 }
2004 get_dispatch_table(ot_device_table_map, commandBuffer)
2005 ->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
2006}
2007
2008VKAPI_ATTR void VKAPI_CALL CmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount,
2009 uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
2010 bool skip_call = false;
2011 {
2012 std::lock_guard<std::mutex> lock(global_lock);
2013 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002014 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002015 }
2016 if (skip_call) {
2017 return;
2018 }
2019 get_dispatch_table(ot_device_table_map, commandBuffer)
2020 ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2021}
2022
2023VKAPI_ATTR void VKAPI_CALL CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount,
2024 uint32_t stride) {
2025 bool skip_call = false;
2026 {
2027 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002028 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002029 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002030 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002031 }
2032 if (skip_call) {
2033 return;
2034 }
2035 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
2036}
2037
2038VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
2039 uint32_t drawCount, uint32_t stride) {
2040 bool skip_call = false;
2041 {
2042 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002043 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002044 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002045 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002046 }
2047 if (skip_call) {
2048 return;
2049 }
2050 get_dispatch_table(ot_device_table_map, commandBuffer)
2051 ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
2052}
2053
2054VKAPI_ATTR void VKAPI_CALL CmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z) {
2055 bool skip_call = false;
2056 {
2057 std::lock_guard<std::mutex> lock(global_lock);
2058 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002059 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002060 }
2061 if (skip_call) {
2062 return;
2063 }
2064 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
2065}
2066
2067VKAPI_ATTR void VKAPI_CALL CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) {
2068 bool skip_call = false;
2069 {
2070 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002071 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002072 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002073 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002074 }
2075 if (skip_call) {
2076 return;
2077 }
2078 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
2079}
2080
2081VKAPI_ATTR void VKAPI_CALL CmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer,
2082 uint32_t regionCount, const VkBufferCopy *pRegions) {
2083 bool skip_call = false;
2084 {
2085 std::lock_guard<std::mutex> lock(global_lock);
2086 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002087 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2088 skip_call |= ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2089 skip_call |= ValidateObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002090 }
2091 if (skip_call) {
2092 return;
2093 }
2094 get_dispatch_table(ot_device_table_map, commandBuffer)
2095 ->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
2096}
2097
2098VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2099 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2100 const VkImageCopy *pRegions) {
2101 bool skip_call = false;
2102 {
2103 std::lock_guard<std::mutex> lock(global_lock);
2104 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002105 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2106 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2107 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002108 }
2109 if (skip_call) {
2110 return;
2111 }
2112 get_dispatch_table(ot_device_table_map, commandBuffer)
2113 ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2114}
2115
2116VKAPI_ATTR void VKAPI_CALL CmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2117 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2118 const VkImageBlit *pRegions, VkFilter filter) {
2119 bool skip_call = false;
2120 {
2121 std::lock_guard<std::mutex> lock(global_lock);
2122 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002123 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2124 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2125 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002126 }
2127 if (skip_call) {
2128 return;
2129 }
2130 get_dispatch_table(ot_device_table_map, commandBuffer)
2131 ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
2132}
2133
2134VKAPI_ATTR void VKAPI_CALL CmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage,
2135 VkImageLayout dstImageLayout, uint32_t regionCount,
2136 const VkBufferImageCopy *pRegions) {
2137 bool skip_call = false;
2138 {
2139 std::lock_guard<std::mutex> lock(global_lock);
2140 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002141 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2142 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2143 skip_call |= ValidateObject(commandBuffer, srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002144 }
2145 if (skip_call) {
2146 return;
2147 }
2148 get_dispatch_table(ot_device_table_map, commandBuffer)
2149 ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
2150}
2151
2152VKAPI_ATTR void VKAPI_CALL CmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2153 VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions) {
2154 bool skip_call = false;
2155 {
2156 std::lock_guard<std::mutex> lock(global_lock);
2157 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002158 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2159 skip_call |= ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2160 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002161 }
2162 if (skip_call) {
2163 return;
2164 }
2165 get_dispatch_table(ot_device_table_map, commandBuffer)
2166 ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
2167}
2168
2169VKAPI_ATTR void VKAPI_CALL CmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2170 VkDeviceSize dataSize, const uint32_t *pData) {
2171 bool skip_call = false;
2172 {
2173 std::lock_guard<std::mutex> lock(global_lock);
2174 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002175 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2176 skip_call |= ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002177 }
2178 if (skip_call) {
2179 return;
2180 }
2181 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2182}
2183
2184VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2185 VkDeviceSize size, uint32_t data) {
2186 bool skip_call = false;
2187 {
2188 std::lock_guard<std::mutex> lock(global_lock);
2189 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002190 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2191 skip_call |= ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002192 }
2193 if (skip_call) {
2194 return;
2195 }
2196 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2197}
2198
2199VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2200 const VkClearColorValue *pColor, uint32_t rangeCount,
2201 const VkImageSubresourceRange *pRanges) {
2202 bool skip_call = false;
2203 {
2204 std::lock_guard<std::mutex> lock(global_lock);
2205 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002206 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2207 skip_call |= ValidateObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002208 }
2209 if (skip_call) {
2210 return;
2211 }
2212 get_dispatch_table(ot_device_table_map, commandBuffer)
2213 ->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
2214}
2215
2216VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
2217 const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount,
2218 const VkImageSubresourceRange *pRanges) {
2219 bool skip_call = false;
2220 {
2221 std::lock_guard<std::mutex> lock(global_lock);
2222 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002223 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2224 skip_call |= ValidateObject(commandBuffer, image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002225 }
2226 if (skip_call) {
2227 return;
2228 }
2229 get_dispatch_table(ot_device_table_map, commandBuffer)
2230 ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
2231}
2232
2233VKAPI_ATTR void VKAPI_CALL CmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2234 const VkClearAttachment *pAttachments, uint32_t rectCount,
2235 const VkClearRect *pRects) {
2236 bool skip_call = false;
2237 {
2238 std::lock_guard<std::mutex> lock(global_lock);
2239 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002240 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002241 }
2242 if (skip_call) {
2243 return;
2244 }
2245 get_dispatch_table(ot_device_table_map, commandBuffer)
2246 ->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
2247}
2248
2249VKAPI_ATTR void VKAPI_CALL CmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
2250 VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
2251 const VkImageResolve *pRegions) {
2252 bool skip_call = false;
2253 {
2254 std::lock_guard<std::mutex> lock(global_lock);
2255 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002256 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2257 skip_call |= ValidateObject(commandBuffer, dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2258 skip_call |= ValidateObject(commandBuffer, srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002259 }
2260 if (skip_call) {
2261 return;
2262 }
2263 get_dispatch_table(ot_device_table_map, commandBuffer)
2264 ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
2265}
2266
2267VKAPI_ATTR void VKAPI_CALL CmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2268 bool skip_call = false;
2269 {
2270 std::lock_guard<std::mutex> lock(global_lock);
2271 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002272 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2273 skip_call |= ValidateObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002274 }
2275 if (skip_call) {
2276 return;
2277 }
2278 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
2279}
2280
2281VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
2282 bool skip_call = false;
2283 {
2284 std::lock_guard<std::mutex> lock(global_lock);
2285 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002286 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2287 skip_call |= ValidateObject(commandBuffer, event, VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002288 }
2289 if (skip_call) {
2290 return;
2291 }
2292 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
2293}
2294
2295VKAPI_ATTR void VKAPI_CALL CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
2296 VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
2297 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2298 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2299 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2300 bool skip_call = false;
2301 {
2302 std::lock_guard<std::mutex> lock(global_lock);
2303 skip_call |=
2304 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2305 if (pBufferMemoryBarriers) {
2306 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2307 if (pBufferMemoryBarriers[idx0].buffer) {
2308 skip_call |= ValidateObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2309 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2310 }
2311 }
2312 }
2313 if (pEvents) {
2314 for (uint32_t idx1 = 0; idx1 < eventCount; ++idx1) {
2315 skip_call |=
2316 ValidateObject(commandBuffer, pEvents[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, false);
2317 }
2318 }
2319 if (pImageMemoryBarriers) {
2320 for (uint32_t idx2 = 0; idx2 < imageMemoryBarrierCount; ++idx2) {
2321 if (pImageMemoryBarriers[idx2].image) {
2322 skip_call |= ValidateObject(commandBuffer, pImageMemoryBarriers[idx2].image,
2323 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2324 }
2325 }
2326 }
2327 }
2328 if (skip_call) {
2329 return;
2330 }
2331 get_dispatch_table(ot_device_table_map, commandBuffer)
2332 ->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers,
2333 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2334}
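// Illustrative sketch, not part of the layer source: every vkCmd* hook above follows the same shape --
// take global_lock, ValidateObject() each handle the command references (including the elements of any
// handle arrays), return early if validation failed, then forward the call unchanged through the device
// dispatch table. An application-side call that would exercise the pEvents loop in CmdWaitEvents might
// look like this (cmd_buf and evt are assumed names):
//
//     VkEvent evt;                                   // assume created earlier with vkCreateEvent
//     vkCmdWaitEvents(cmd_buf, 1, &evt,
//                     VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
//                     0, nullptr, 0, nullptr, 0, nullptr);
//
// If evt was never created, or was already destroyed, the pEvents loop reports it before the call
// reaches the driver.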
2335
2336VKAPI_ATTR void VKAPI_CALL CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2337 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2338 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2339 uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2340 uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
2341 bool skip_call = false;
2342 {
2343 std::lock_guard<std::mutex> lock(global_lock);
2344 skip_call |=
2345 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2346 if (pBufferMemoryBarriers) {
2347 for (uint32_t idx0 = 0; idx0 < bufferMemoryBarrierCount; ++idx0) {
2348 if (pBufferMemoryBarriers[idx0].buffer) {
2349 skip_call |= ValidateObject(commandBuffer, pBufferMemoryBarriers[idx0].buffer,
2350 VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2351 }
2352 }
2353 }
2354 if (pImageMemoryBarriers) {
2355 for (uint32_t idx1 = 0; idx1 < imageMemoryBarrierCount; ++idx1) {
2356 if (pImageMemoryBarriers[idx1].image) {
2357 skip_call |= ValidateObject(commandBuffer, pImageMemoryBarriers[idx1].image,
2358 VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
2359 }
2360 }
2361 }
2362 }
2363 if (skip_call) {
2364 return;
2365 }
2366 get_dispatch_table(ot_device_table_map, commandBuffer)
2367 ->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers,
2368 bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2369}
2370
2371VKAPI_ATTR void VKAPI_CALL CmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query,
2372 VkQueryControlFlags flags) {
2373 bool skip_call = false;
2374 {
2375 std::lock_guard<std::mutex> lock(global_lock);
2376 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002377 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2378 skip_call |= ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002379 }
2380 if (skip_call) {
2381 return;
2382 }
2383 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, query, flags);
2384}
2385
2386VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) {
2387 bool skip_call = false;
2388 {
2389 std::lock_guard<std::mutex> lock(global_lock);
2390 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002391 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2392 skip_call |= ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002393 }
2394 if (skip_call) {
2395 return;
2396 }
2397 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, query);
2398}
2399
2400VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2401 uint32_t queryCount) {
2402 bool skip_call = false;
2403 {
2404 std::lock_guard<std::mutex> lock(global_lock);
2405 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002406 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2407 skip_call |= ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002408 }
2409 if (skip_call) {
2410 return;
2411 }
2412 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2413}
2414
2415VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
2416 VkQueryPool queryPool, uint32_t query) {
2417 bool skip_call = false;
2418 {
2419 std::lock_guard<std::mutex> lock(global_lock);
2420 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002421 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2422 skip_call |= ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002423 }
2424 if (skip_call) {
2425 return;
2426 }
2427 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
2428}
2429
2430VKAPI_ATTR void VKAPI_CALL CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
2431 uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
2432 VkDeviceSize stride, VkQueryResultFlags flags) {
2433 bool skip_call = false;
2434 {
2435 std::lock_guard<std::mutex> lock(global_lock);
2436 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002437 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2438 skip_call |= ValidateObject(commandBuffer, dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
2439 skip_call |= ValidateObject(commandBuffer, queryPool, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002440 }
2441 if (skip_call) {
2442 return;
2443 }
2444 get_dispatch_table(ot_device_table_map, commandBuffer)
2445 ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
2446}
2447
2448VKAPI_ATTR void VKAPI_CALL CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags,
2449 uint32_t offset, uint32_t size, const void *pValues) {
2450 bool skip_call = false;
2451 {
2452 std::lock_guard<std::mutex> lock(global_lock);
2453 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002454 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2455 skip_call |= ValidateObject(commandBuffer, layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002456 }
2457 if (skip_call) {
2458 return;
2459 }
2460 get_dispatch_table(ot_device_table_map, commandBuffer)
2461 ->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
2462}
2463
2464VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin,
2465 VkSubpassContents contents) {
2466 bool skip_call = false;
2467 {
2468 std::lock_guard<std::mutex> lock(global_lock);
2469 skip_call |=
2470 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2471 if (pRenderPassBegin) {
2472 skip_call |= ValidateObject(commandBuffer, pRenderPassBegin->framebuffer,
2473 VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, false);
2474 skip_call |= ValidateObject(commandBuffer, pRenderPassBegin->renderPass,
2475 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
2476 }
2477 }
2478 if (skip_call) {
2479 return;
2480 }
2481 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2482}
2483
2484VKAPI_ATTR void VKAPI_CALL CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2485 bool skip_call = false;
2486 {
2487 std::lock_guard<std::mutex> lock(global_lock);
2488 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002489 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002490 }
2491 if (skip_call) {
2492 return;
2493 }
2494 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
2495}
2496
2497VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass(VkCommandBuffer commandBuffer) {
2498 bool skip_call = false;
2499 {
2500 std::lock_guard<std::mutex> lock(global_lock);
2501 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002502 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002503 }
2504 if (skip_call) {
2505 return;
2506 }
2507 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
2508}
2509
2510VKAPI_ATTR void VKAPI_CALL CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
2511 const VkCommandBuffer *pCommandBuffers) {
2512 bool skip_call = false;
2513 {
2514 std::lock_guard<std::mutex> lock(global_lock);
2515 skip_call |=
2516 ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2517 if (pCommandBuffers) {
2518 for (uint32_t idx0 = 0; idx0 < commandBufferCount; ++idx0) {
2519 skip_call |= ValidateObject(commandBuffer, pCommandBuffers[idx0],
2520 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
2521 }
2522 }
2523 }
2524 if (skip_call) {
2525 return;
2526 }
2527 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
2528}
2529
2530VKAPI_ATTR void VKAPI_CALL DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {
2531 bool skip_call = false;
2532 {
2533 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002534 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
2535 skip_call |= ValidateObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002536 }
2537 if (skip_call) {
2538 return;
2539 }
2540 {
2541 std::lock_guard<std::mutex> lock(global_lock);
2542 DestroyObject(instance, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
2543 }
2544 get_dispatch_table(ot_instance_table_map, instance)->DestroySurfaceKHR(instance, surface, pAllocator);
2545}
2546
2547VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex,
2548 VkSurfaceKHR surface, VkBool32 *pSupported) {
2549 bool skip_call = false;
2550 {
2551 std::lock_guard<std::mutex> lock(global_lock);
2552 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002553 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2554 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002555 }
2556 if (skip_call) {
2557 return VK_ERROR_VALIDATION_FAILED_EXT;
2558 }
2559 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2560 ->GetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported);
2561 return result;
2562}
2563
2564VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2565 VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
2566 bool skip_call = false;
2567 {
2568 std::lock_guard<std::mutex> lock(global_lock);
2569 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002570 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2571 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002572 }
2573 if (skip_call) {
2574 return VK_ERROR_VALIDATION_FAILED_EXT;
2575 }
2576 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2577 ->GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities);
2578 return result;
2579}
2580
2581VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2582 uint32_t *pSurfaceFormatCount,
2583 VkSurfaceFormatKHR *pSurfaceFormats) {
2584 bool skip_call = false;
2585 {
2586 std::lock_guard<std::mutex> lock(global_lock);
2587 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002588 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2589 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002590 }
2591 if (skip_call) {
2592 return VK_ERROR_VALIDATION_FAILED_EXT;
2593 }
2594 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2595 ->GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
2596 return result;
2597}
2598
2599VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
2600 uint32_t *pPresentModeCount,
2601 VkPresentModeKHR *pPresentModes) {
2602 bool skip_call = false;
2603 {
2604 std::lock_guard<std::mutex> lock(global_lock);
2605 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002606 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
2607 skip_call |= ValidateObject(physicalDevice, surface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002608 }
2609 if (skip_call) {
2610 return VK_ERROR_VALIDATION_FAILED_EXT;
2611 }
2612 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2613 ->GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
2614 return result;
2615}
2616
2617VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
2618 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
2619 bool skip_call = false;
2620 {
2621 std::lock_guard<std::mutex> lock(global_lock);
2622 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2623 if (pCreateInfo) {
2624 skip_call |= ValidateObject(device, pCreateInfo->oldSwapchain,
2625 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2626 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2627 skip_call |= ValidateObject(device_data->physical_device, pCreateInfo->surface,
2628 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2629 }
2630 }
2631 if (skip_call) {
2632 return VK_ERROR_VALIDATION_FAILED_EXT;
2633 }
2634 VkResult result =
2635 get_dispatch_table(ot_device_table_map, device)->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
2636 {
2637 std::lock_guard<std::mutex> lock(global_lock);
2638 if (result == VK_SUCCESS) {
2639 CreateObject(device, *pSwapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
2640 }
2641 }
2642 return result;
2643}
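// Illustrative sketch, not part of the layer source: creation hooks pair a pre-call ValidateObject()
// pass with a post-call CreateObject() that records the new handle on VK_SUCCESS. Note the 'true'
// passed for pCreateInfo->oldSwapchain above -- it validates that handle with the final bool set so
// that VK_NULL_HANDLE is accepted silently. From the application side (dev and swapchain_info are
// assumed names):
//
//     VkSwapchainKHR swapchain = VK_NULL_HANDLE;
//     swapchain_info.oldSwapchain = VK_NULL_HANDLE;   // allowed: the old swapchain is optional here
//     VkResult res = vkCreateSwapchainKHR(dev, &swapchain_info, nullptr, &swapchain);
//     // on VK_SUCCESS the layer tracks 'swapchain' until it sees the matching destroy call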
2644
2645VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout,
2646 VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
2647 bool skip_call = false;
2648 {
2649 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002650 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2651 skip_call |= ValidateObject(device, fence, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, true);
2652 skip_call |= ValidateObject(device, semaphore, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, true);
2653 skip_call |= ValidateObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002654 }
2655 if (skip_call) {
2656 return VK_ERROR_VALIDATION_FAILED_EXT;
2657 }
2658 VkResult result = get_dispatch_table(ot_device_table_map, device)
2659 ->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
2660 return result;
2661}
2662
2663VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
2664 bool skip_call = false;
2665 {
2666 std::lock_guard<std::mutex> lock(global_lock);
2667 if (pPresentInfo) {
2668 if (pPresentInfo->pSwapchains) {
2669 for (uint32_t idx0 = 0; idx0 < pPresentInfo->swapchainCount; ++idx0) {
2670 skip_call |= ValidateObject(queue, pPresentInfo->pSwapchains[idx0],
2671 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, false);
2672 }
2673 }
2674 if (pPresentInfo->pWaitSemaphores) {
2675 for (uint32_t idx1 = 0; idx1 < pPresentInfo->waitSemaphoreCount; ++idx1) {
2676 skip_call |= ValidateObject(queue, pPresentInfo->pWaitSemaphores[idx1],
2677 VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, false);
2678 }
2679 }
2680 }
2681 skip_call |= ValidateObject(queue, queue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, false);
2682 }
2683 if (skip_call) {
2684 return VK_ERROR_VALIDATION_FAILED_EXT;
2685 }
2686 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueuePresentKHR(queue, pPresentInfo);
2687 return result;
2688}
2689
2690#ifdef VK_USE_PLATFORM_WIN32_KHR
2691VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
2692 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2693 bool skip_call = false;
2694 {
2695 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002696 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002697 }
2698 if (skip_call) {
2699 return VK_ERROR_VALIDATION_FAILED_EXT;
2700 }
2701 VkResult result =
2702 get_dispatch_table(ot_instance_table_map, instance)->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2703 {
2704 std::lock_guard<std::mutex> lock(global_lock);
2705 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002706 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002707 }
2708 }
2709 return result;
2710}
2711
2712VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,
2713 uint32_t queueFamilyIndex) {
2714 bool skip_call = false;
2715 {
2716 std::lock_guard<std::mutex> lock(global_lock);
2717 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002718 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002719 }
2720 if (skip_call) {
2721 return VK_FALSE;
2722 }
2723 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2724 ->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
2725 return result;
2726}
2727#endif // VK_USE_PLATFORM_WIN32_KHR
2728
2729#ifdef VK_USE_PLATFORM_XCB_KHR
2730VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
2731 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2732 bool skip_call = false;
2733 {
2734 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002735 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002736 }
2737 if (skip_call) {
2738 return VK_ERROR_VALIDATION_FAILED_EXT;
2739 }
2740 VkResult result =
2741 get_dispatch_table(ot_instance_table_map, instance)->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2742 {
2743 std::lock_guard<std::mutex> lock(global_lock);
2744 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002745 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002746 }
2747 }
2748 return result;
2749}
2750
2751VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2752 uint32_t queueFamilyIndex, xcb_connection_t *connection,
2753 xcb_visualid_t visual_id) {
2754 bool skip_call = false;
2755 {
2756 std::lock_guard<std::mutex> lock(global_lock);
2757 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002758 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002759 }
2760 if (skip_call) {
2761 return VK_FALSE;
2762 }
2763 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2764 ->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
2765 return result;
2766}
2767#endif // VK_USE_PLATFORM_XCB_KHR
2768
2769#ifdef VK_USE_PLATFORM_XLIB_KHR
2770VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
2771 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2772 bool skip_call = false;
2773 {
2774 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002775 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002776 }
2777 if (skip_call) {
2778 return VK_ERROR_VALIDATION_FAILED_EXT;
2779 }
2780 VkResult result =
2781 get_dispatch_table(ot_instance_table_map, instance)->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2782 {
2783 std::lock_guard<std::mutex> lock(global_lock);
2784 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002785 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002786 }
2787 }
2788 return result;
2789}
2790
2791VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2792 uint32_t queueFamilyIndex, Display *dpy,
2793 VisualID visualID) {
2794 bool skip_call = false;
2795 {
2796 std::lock_guard<std::mutex> lock(global_lock);
2797 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002798 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002799 }
2800 if (skip_call) {
2801 return VK_FALSE;
2802 }
2803 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2804 ->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
2805 return result;
2806}
2807#endif // VK_USE_PLATFORM_XLIB_KHR
2808
2809#ifdef VK_USE_PLATFORM_MIR_KHR
2810VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
2811 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2812 bool skip_call = false;
2813 {
2814 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002815 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002816 }
2817 if (skip_call) {
2818 return VK_ERROR_VALIDATION_FAILED_EXT;
2819 }
2820 VkResult result =
2821 get_dispatch_table(ot_instance_table_map, instance)->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2822 {
2823 std::lock_guard<std::mutex> lock(global_lock);
2824 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002825 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002826 }
2827 }
2828 return result;
2829}
2830
2831VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2832 uint32_t queueFamilyIndex, MirConnection *connection) {
2833 bool skip_call = false;
2834 {
2835 std::lock_guard<std::mutex> lock(global_lock);
2836 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002837 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002838 }
2839 if (skip_call) {
2840 return VK_FALSE;
2841 }
2842 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2843 ->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
2844 return result;
2845}
2846#endif // VK_USE_PLATFORM_MIR_KHR
2847
2848#ifdef VK_USE_PLATFORM_WAYLAND_KHR
2849VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
2850 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2851 bool skip_call = false;
2852 {
2853 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002854 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002855 }
2856 if (skip_call) {
2857 return VK_ERROR_VALIDATION_FAILED_EXT;
2858 }
2859 VkResult result =
2860 get_dispatch_table(ot_instance_table_map, instance)->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2861 {
2862 std::lock_guard<std::mutex> lock(global_lock);
2863 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002864 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002865 }
2866 }
2867 return result;
2868}
2869
2870VKAPI_ATTR VkBool32 VKAPI_CALL GetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice,
2871 uint32_t queueFamilyIndex,
2872 struct wl_display *display) {
2873 bool skip_call = false;
2874 {
2875 std::lock_guard<std::mutex> lock(global_lock);
2876 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13002877 ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002878 }
2879 if (skip_call) {
2880 return VK_FALSE;
2881 }
2882 VkBool32 result = get_dispatch_table(ot_instance_table_map, physicalDevice)
2883 ->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
2884 return result;
2885}
2886#endif // VK_USE_PLATFORM_WAYLAND_KHR
2887
2888#ifdef VK_USE_PLATFORM_ANDROID_KHR
2889VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
2890 const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
2891 bool skip_call = false;
2892 {
2893 std::lock_guard<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13002894 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002895 }
2896 if (skip_call) {
2897 return VK_ERROR_VALIDATION_FAILED_EXT;
2898 }
2899 VkResult result =
2900 get_dispatch_table(ot_instance_table_map, instance)->CreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface);
2901 {
2902 std::lock_guard<std::mutex> lock(global_lock);
2903 if (result == VK_SUCCESS) {
Chris Forbesfeecd402016-09-29 14:53:50 +13002904 CreateObject(instance, *pSurface, VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06002905 }
2906 }
2907 return result;
2908}
2909#endif // VK_USE_PLATFORM_ANDROID_KHR
2910
2911VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
2912 const VkSwapchainCreateInfoKHR *pCreateInfos,
2913 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
2914 bool skip_call = false;
2915 uint32_t i = 0;
2916 {
2917 std::lock_guard<std::mutex> lock(global_lock);
2918 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
2919 if (NULL != pCreateInfos) {
2920 for (i = 0; i < swapchainCount; i++) {
2921 skip_call |= ValidateObject(device, pCreateInfos[i].oldSwapchain,
2922 VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, true);
2923 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
2924 skip_call |= ValidateObject(device_data->physical_device, pCreateInfos[i].surface,
2925 VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, false);
2926 }
2927 }
2928 }
2929 if (skip_call) {
2930 return VK_ERROR_VALIDATION_FAILED_EXT;
2931 }
2932 VkResult result =
2933 get_dispatch_table(ot_device_table_map, device)->CreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains);
2934 {
2935 std::lock_guard<std::mutex> lock(global_lock);
2936 if (result == VK_SUCCESS) {
2937 for (i = 0; i < swapchainCount; i++) {
2938 CreateObject(device, pSwapchains[i], VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
2939 }
2940 }
2941 }
2942 return result;
2943}
2944
2945VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
2946 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
2947 const VkAllocationCallbacks *pAllocator,
2948 VkDebugReportCallbackEXT *pCallback) {
2949 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2950 VkResult result = pInstanceTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback);
2951 if (VK_SUCCESS == result) {
2952 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2953 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pCallback);
2954 CreateObject(instance, *pCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
2955 }
2956 return result;
2957}
2958
2959VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
2960 const VkAllocationCallbacks *pAllocator) {
2961 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2962 pInstanceTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
2963 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
2964 layer_destroy_msg_callback(instance_data->report_data, msgCallback, pAllocator);
2965 DestroyObject(instance, msgCallback, VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, pAllocator);
2966}
2967
2968VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
2969 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
2970 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
2971 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, instance);
2972 pInstanceTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
2973}
2974
2975static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
2976
2977static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
2978 VK_LAYER_API_VERSION, // specVersion
2979 1, // implementationVersion
2980 "LunarG Validation Layer"};
2981
2982VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
2983 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2984}
2985
2986VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
2987 VkLayerProperties *pProperties) {
2988 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
2989}
2990
2991VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
2992 VkExtensionProperties *pProperties) {
2993 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
2994 return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
2995
2996 return VK_ERROR_LAYER_NOT_PRESENT;
2997}
2998
2999VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
3000 uint32_t *pCount, VkExtensionProperties *pProperties) {
3001 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
3002 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
3003
3004 assert(physicalDevice);
3005 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, physicalDevice);
3006 return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
3007}
3008
3009static inline PFN_vkVoidFunction InterceptMsgCallbackGetProcAddrCommand(const char *name, VkInstance instance) {
3010 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
3011 return debug_report_get_instance_proc_addr(instance_data->report_data, name);
3012}
3013
3014static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
3015 VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
3016 if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
3017 return nullptr;
3018
3019 if (!strcmp("vkDestroySurfaceKHR", name))
3020 return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
3021 if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
3022 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
3023 if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
3024 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceCapabilitiesKHR);
3025 if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name))
3026 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceFormatsKHR);
3027 if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name))
3028 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfacePresentModesKHR);
3029
3030#ifdef VK_USE_PLATFORM_WIN32_KHR
3031 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkCreateWin32SurfaceKHR", name))
3032 return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
3033 if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
3034 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
3035#endif // VK_USE_PLATFORM_WIN32_KHR
3036#ifdef VK_USE_PLATFORM_XCB_KHR
3037 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
3038 return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
3039 if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
3040 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
3041#endif // VK_USE_PLATFORM_XCB_KHR
3042#ifdef VK_USE_PLATFORM_XLIB_KHR
3043 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
3044 return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
3045 if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
3046 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
3047#endif // VK_USE_PLATFORM_XLIB_KHR
3048#ifdef VK_USE_PLATFORM_MIR_KHR
3049 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
3050 return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
3051 if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
3052 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
3053#endif // VK_USE_PLATFORM_MIR_KHR
3054#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3055 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
3056 return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
3057 if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
3058 return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
3059#endif // VK_USE_PLATFORM_WAYLAND_KHR
3060#ifdef VK_USE_PLATFORM_ANDROID_KHR
3061 if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
3062 return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
3063#endif // VK_USE_PLATFORM_ANDROID_KHR
3064
3065 return nullptr;
3066}
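// Illustrative sketch, not part of the layer source: this helper is consulted from the layer's
// GetInstanceProcAddr path; it only returns a WSI entry point when the matching surface extension was
// enabled on the instance, otherwise the nullptr return lets the lookup fall through to the next layer
// or the ICD. A hypothetical additional platform would follow the same pattern ("Foo" and foo_enabled
// are made-up placeholders, not real Vulkan identifiers):
//
//     #ifdef VK_USE_PLATFORM_FOO_KHR
//         if ((instanceExtMap[pTable].foo_enabled == true) && !strcmp("vkCreateFooSurfaceKHR", name))
//             return reinterpret_cast<PFN_vkVoidFunction>(CreateFooSurfaceKHR);
//     #endif // VK_USE_PLATFORM_FOO_KHR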
3067
3068static void CheckDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
3069 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3070 device_data->wsi_enabled = false;
3071 device_data->wsi_display_swapchain_enabled = false;
3072 device_data->objtrack_extensions_enabled = false;
3073
3074 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3075 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
3076 device_data->wsi_enabled = true;
3077 }
3078 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME) == 0) {
3079 device_data->wsi_display_swapchain_enabled = true;
3080 }
3081 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0) {
3082 device_data->objtrack_extensions_enabled = true;
3083 }
3084 }
3085}
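// Illustrative sketch, not part of the layer source: the flags set above are derived purely from the
// extension names the application passes at vkCreateDevice time, e.g. (variable names assumed):
//
//     const char *dev_exts[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
//     device_create_info.enabledExtensionCount   = 1;
//     device_create_info.ppEnabledExtensionNames = dev_exts;   // -> wsi_enabled becomes true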
3086
3087static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
3088 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(ot_instance_table_map, instance);
3089
3090
3091 instanceExtMap[pDisp] = {};
3092
3093 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
3094 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
3095 instanceExtMap[pDisp].wsi_enabled = true;
3096 }
3097#ifdef VK_USE_PLATFORM_XLIB_KHR
3098 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
3099 instanceExtMap[pDisp].xlib_enabled = true;
3100 }
3101#endif
3102#ifdef VK_USE_PLATFORM_XCB_KHR
3103 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
3104 instanceExtMap[pDisp].xcb_enabled = true;
3105 }
3106#endif
3107#ifdef VK_USE_PLATFORM_WAYLAND_KHR
3108 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
3109 instanceExtMap[pDisp].wayland_enabled = true;
3110 }
3111#endif
3112#ifdef VK_USE_PLATFORM_MIR_KHR
3113 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
3114 instanceExtMap[pDisp].mir_enabled = true;
3115 }
3116#endif
3117#ifdef VK_USE_PLATFORM_ANDROID_KHR
3118 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
3119 instanceExtMap[pDisp].android_enabled = true;
3120 }
3121#endif
3122#ifdef VK_USE_PLATFORM_WIN32_KHR
3123 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
3124 instanceExtMap[pDisp].win32_enabled = true;
3125 }
3126#endif
3127 }
3128}
3129
3130VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
3131 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
3132 std::lock_guard<std::mutex> lock(global_lock);
3133 layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3134 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3135
3136 assert(chain_info->u.pLayerInfo);
3137 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3138 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
3139 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(phy_dev_data->instance, "vkCreateDevice");
3140 if (fpCreateDevice == NULL) {
3141 return VK_ERROR_INITIALIZATION_FAILED;
3142 }
3143
3144 // Advance the link info for the next element on the chain
3145 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3146
3147 VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
3148 if (result != VK_SUCCESS) {
3149 return result;
3150 }
3151
3152 layer_data *device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
3153 device_data->report_data = layer_debug_report_create_device(phy_dev_data->report_data, *pDevice);
3154
3155 // Add link back to physDev
3156 device_data->physical_device = physicalDevice;
3157
3158 initDeviceTable(*pDevice, fpGetDeviceProcAddr, ot_device_table_map);
3159
3160 CheckDeviceRegisterExtensions(pCreateInfo, *pDevice);
3161 CreateObject(*pDevice, *pDevice, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator);
3162
3163 return result;
3164}
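// Illustrative note, not part of the layer source: the VK_LAYER_LINK_INFO handling in CreateDevice is
// the standard layer-chaining pattern -- read the next link's GetInstanceProcAddr / GetDeviceProcAddr,
// advance chain_info->u.pLayerInfo, then call the next vkCreateDevice, so the call path is roughly:
//
//     application -> object_tracker::CreateDevice -> next layer's CreateDevice -> ... -> ICD
//
// The dispatch table built by initDeviceTable() therefore points one hop further down the chain, which
// is what every get_dispatch_table(ot_device_table_map, ...) call in this file relies on.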
3165
3166VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
3167 uint32_t *pQueueFamilyPropertyCount,
3168 VkQueueFamilyProperties *pQueueFamilyProperties) {
3169 get_dispatch_table(ot_instance_table_map, physicalDevice)
3170 ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3171 std::lock_guard<std::mutex> lock(global_lock);
3172 if (pQueueFamilyProperties != NULL) {
3173 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
3174 for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; i++) {
3175 instance_data->queue_family_properties.emplace_back(pQueueFamilyProperties[i]);
3176 }
3177 }
3178}
3179
3180VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
3181 VkInstance *pInstance) {
3182 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
3183
3184 assert(chain_info->u.pLayerInfo);
3185 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
3186 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
3187 if (fpCreateInstance == NULL) {
3188 return VK_ERROR_INITIALIZATION_FAILED;
3189 }
3190
3191 // Advance the link info for the next element on the chain
3192 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
3193
3194 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
3195 if (result != VK_SUCCESS) {
3196 return result;
3197 }
3198
3199 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
3200 instance_data->instance = *pInstance;
3201 initInstanceTable(*pInstance, fpGetInstanceProcAddr, ot_instance_table_map);
3202 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(ot_instance_table_map, *pInstance);
3203
3204 // Look for one or more debug report create info structures, and copy the
3205 // callback(s) for each one found (for use by vkDestroyInstance)
3206 layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
3207 &instance_data->tmp_callbacks);
3208
3209 instance_data->report_data = debug_report_create_instance(pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
3210 pCreateInfo->ppEnabledExtensionNames);
3211
3212 InitObjectTracker(instance_data, pAllocator);
3213 CheckInstanceRegisterExtensions(pCreateInfo, *pInstance);
3214
3215 CreateObject(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, pAllocator);
3216
3217 return result;
3218}
3219
3220VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
3221 VkPhysicalDevice *pPhysicalDevices) {
3222 bool skip_call = false;
3223 std::unique_lock<std::mutex> lock(global_lock);
3224 skip_call |= ValidateObject(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
3225 lock.unlock();
3226 if (skip_call) {
3227 return VK_ERROR_VALIDATION_FAILED_EXT;
3228 }
3229 VkResult result = get_dispatch_table(ot_instance_table_map, instance)
3230 ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
3231 lock.lock();
3232 if (result == VK_SUCCESS) {
3233 if (pPhysicalDevices) {
3234 for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
3235 CreateObject(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, nullptr);
3236 }
3237 }
3238 }
3239 lock.unlock();
3240 return result;
3241}
3242
3243VKAPI_ATTR void VKAPI_CALL GetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) {
3244 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003245 ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003246 lock.unlock();
3247
3248 get_dispatch_table(ot_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
3249
3250 lock.lock();
3251
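// Queues are retrieved rather than created by the application, so register the queue on
// first retrieval and record which family it came from; queue-capability checks such as
// ValidateQueueFlags (see QueueBindSparse below) rely on this bookkeeping.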
3252 CreateQueue(device, *pQueue, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT);
3253 AddQueueInfo(device, queueFamilyIndex, *pQueue);
3254}
3255
3256VKAPI_ATTR void VKAPI_CALL FreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) {
3257 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003258 ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003259 lock.unlock();
3260
3261 get_dispatch_table(ot_device_table_map, device)->FreeMemory(device, memory, pAllocator);
3262
3263 lock.lock();
Chris Forbesec461992016-09-29 14:41:44 +13003264 DestroyObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003265}
3266
3267VKAPI_ATTR VkResult VKAPI_CALL MapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size,
3268 VkMemoryMapFlags flags, void **ppData) {
3269 bool skip_call = VK_FALSE;
3270 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003271 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003272 lock.unlock();
3273 if (skip_call) {
3274 return VK_ERROR_VALIDATION_FAILED_EXT;
3275 }
3276 VkResult result = get_dispatch_table(ot_device_table_map, device)->MapMemory(device, memory, offset, size, flags, ppData);
3277 return result;
3278}
3279
3280VKAPI_ATTR void VKAPI_CALL UnmapMemory(VkDevice device, VkDeviceMemory memory) {
3281 bool skip_call = VK_FALSE;
3282 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003283 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003284 lock.unlock();
3285 if (skip_call) {
3286 return;
3287 }
3288
3289 get_dispatch_table(ot_device_table_map, device)->UnmapMemory(device, memory);
3290}
3291VKAPI_ATTR VkResult VKAPI_CALL QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo,
3292 VkFence fence) {
3293 std::unique_lock<std::mutex> lock(global_lock);
3294 ValidateQueueFlags(queue, "QueueBindSparse");
3295
3296 for (uint32_t i = 0; i < bindInfoCount; i++) {
3297 for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
Chris Forbes2f271a72016-09-29 14:58:08 +13003298 ValidateObject(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003299 false);
3300 for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
Chris Forbes2f271a72016-09-29 14:58:08 +13003301 ValidateObject(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003302 false);
3303 for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
Chris Forbes2f271a72016-09-29 14:58:08 +13003304 ValidateObject(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003305 }
3306 lock.unlock();
3307
3308 VkResult result = get_dispatch_table(ot_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
3309 return result;
3310}
3311
3312VKAPI_ATTR VkResult VKAPI_CALL AllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
3313 VkCommandBuffer *pCommandBuffers) {
3314 bool skip_call = VK_FALSE;
3315 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003316 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003317 skip_call |=
Chris Forbes2f271a72016-09-29 14:58:08 +13003318 ValidateObject(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003319 lock.unlock();
3320
3321 if (skip_call) {
3322 return VK_ERROR_VALIDATION_FAILED_EXT;
3323 }
3324
3325 VkResult result =
3326 get_dispatch_table(ot_device_table_map, device)->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
3327
3328 lock.lock();
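// Each command buffer is recorded along with its parent pool and level, so that
// FreeCommandBuffers and DestroyCommandPool can later confirm a buffer actually
// belongs to the pool it is being released from.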
3329 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
3330 AllocateCommandBuffer(device, pAllocateInfo->commandPool, pCommandBuffers[i],
3331 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
3332 }
3333 lock.unlock();
3334
3335 return result;
3336}
3337
3338VKAPI_ATTR VkResult VKAPI_CALL AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
3339 VkDescriptorSet *pDescriptorSets) {
3340 bool skip_call = VK_FALSE;
3341 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003342 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3343 skip_call |= ValidateObject(device, pAllocateInfo->descriptorPool,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003344 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3345 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003346 skip_call |= ValidateObject(device, pAllocateInfo->pSetLayouts[i],
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003347 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
3348 }
3349 lock.unlock();
3350 if (skip_call) {
3351 return VK_ERROR_VALIDATION_FAILED_EXT;
3352 }
3353
3354 VkResult result =
3355 get_dispatch_table(ot_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3356
3357 if (VK_SUCCESS == result) {
3358 lock.lock();
3359 for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
3360 AllocateDescriptorSet(device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
3361 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
3362 }
3363 lock.unlock();
3364 }
3365
3366 return result;
3367}
3368
3369VKAPI_ATTR void VKAPI_CALL FreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount,
3370 const VkCommandBuffer *pCommandBuffers) {
3371 bool skip_call = false;
3372 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003373 ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
3374 ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003375 for (uint32_t i = 0; i < commandBufferCount; i++) {
3376 skip_call |= ValidateCommandBuffer(device, commandPool, pCommandBuffers[i]);
3377 }
3378
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003379 for (uint32_t i = 0; i < commandBufferCount; i++) {
Mark Lobodzinski4dc768c2016-10-03 16:01:12 -06003380 DestroyObject(device, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003381 }
3382
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003383 lock.unlock();
3384 if (!skip_call) {
3385 get_dispatch_table(ot_device_table_map, device)
3386 ->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
3387 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003388}
3389VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
3390 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3391 std::unique_lock<std::mutex> lock(global_lock);
3392 // A swapchain's images are implicitly deleted when the swapchain is deleted.
3393 // Remove this swapchain's images from our map of such images.
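// Note the erase-while-iterating idiom below: the iterator is advanced (post-increment)
// before its element is erased, so the loop never dereferences an invalidated iterator.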
3394 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr = device_data->swapchainImageMap.begin();
3395 while (itr != device_data->swapchainImageMap.end()) {
3396 OBJTRACK_NODE *pNode = (*itr).second;
3397 if (pNode->parent_object == reinterpret_cast<uint64_t &>(swapchain)) {
3398 delete pNode;
3399 auto delete_item = itr++;
3400 device_data->swapchainImageMap.erase(delete_item);
3401 } else {
3402 ++itr;
3403 }
3404 }
Chris Forbesec461992016-09-29 14:41:44 +13003405 DestroyObject(device, swapchain, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003406 lock.unlock();
3407
3408 get_dispatch_table(ot_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
3409}
3410
3411VKAPI_ATTR VkResult VKAPI_CALL FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3412 const VkDescriptorSet *pDescriptorSets) {
3413 bool skip_call = false;
3414 VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
3415 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003416 skip_call |= ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
3417 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003418 for (uint32_t i = 0; i < descriptorSetCount; i++) {
3419 skip_call |= ValidateDescriptorSet(device, descriptorPool, pDescriptorSets[i]);
3420 }
3421
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003422 for (uint32_t i = 0; i < descriptorSetCount; i++) {
Mark Lobodzinski4dc768c2016-10-03 16:01:12 -06003423 DestroyObject(device, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
Mark Lobodzinski9bb11542016-07-13 11:29:00 -06003424 }
3425
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003426 lock.unlock();
3427 if (!skip_call) {
3428 result = get_dispatch_table(ot_device_table_map, device)
3429 ->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3430 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003431 return result;
3432}
3433
3434VKAPI_ATTR void VKAPI_CALL DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
3435 const VkAllocationCallbacks *pAllocator) {
3436 bool skip_call = VK_FALSE;
3437 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3438 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003439 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3440 skip_call |= ValidateObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003441 lock.unlock();
3442 if (skip_call) {
3443 return;
3444 }
3445 // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
3446 // Remove this pool's descriptor sets from our descriptorSet map.
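// DestroyObject() removes the set's entry from the tracking map, so the iterator is
// advanced past an entry before that entry is destroyed.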
3447 lock.lock();
3448 std::unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
3449 device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].begin();
3450 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT].end()) {
3451 OBJTRACK_NODE *pNode = (*itr).second;
3452 auto del_itr = itr++;
3453 if (pNode->parent_object == reinterpret_cast<uint64_t &>(descriptorPool)) {
Chris Forbesec461992016-09-29 14:41:44 +13003454 DestroyObject(device, (VkDescriptorSet)((*del_itr).first),
Mark Lobodzinski4dc768c2016-10-03 16:01:12 -06003455 VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, nullptr);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003456 }
3457 }
Chris Forbesec461992016-09-29 14:41:44 +13003458 DestroyObject(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003459 lock.unlock();
3460 get_dispatch_table(ot_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
3461}
3462
3463VKAPI_ATTR void VKAPI_CALL DestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) {
3464 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
3465 bool skip_call = false;
3466 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003467 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3468 skip_call |= ValidateObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003469 lock.unlock();
3470 if (skip_call) {
3471 return;
3472 }
3473 lock.lock();
3474 // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
3475 // Remove this pool's cmdBuffers from our cmd buffer map.
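// Every tracked command buffer whose parent is this pool is validated and then untracked;
// only after this bookkeeping (and the pool's own removal) is the call passed down the chain.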
3476 auto itr = device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].begin();
3477 auto del_itr = itr;
3478 while (itr != device_data->object_map[VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT].end()) {
3479 OBJTRACK_NODE *pNode = (*itr).second;
3480 del_itr = itr++;
3481 if (pNode->parent_object == reinterpret_cast<uint64_t &>(commandPool)) {
3482 skip_call |= ValidateCommandBuffer(device, commandPool, reinterpret_cast<VkCommandBuffer>((*del_itr).first));
Chris Forbesec461992016-09-29 14:41:44 +13003483 DestroyObject(device, reinterpret_cast<VkCommandBuffer>((*del_itr).first),
Mark Lobodzinski4dc768c2016-10-03 16:01:12 -06003484 VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, nullptr);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003485 }
3486 }
Chris Forbesec461992016-09-29 14:41:44 +13003487 DestroyObject(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003488 lock.unlock();
3489 get_dispatch_table(ot_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
3490}
3491
3492VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
3493 VkImage *pSwapchainImages) {
3494 bool skip_call = VK_FALSE;
3495 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003496 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003497 lock.unlock();
3498 if (skip_call) {
3499 return VK_ERROR_VALIDATION_FAILED_EXT;
3500 }
3501 VkResult result = get_dispatch_table(ot_device_table_map, device)
3502 ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
3503 if (pSwapchainImages != NULL) {
3504 lock.lock();
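// Swapchain images are owned by the swapchain rather than created by the application, so
// they are kept in the separate swapchain-image map, keyed to their parent swapchain
// (see DestroySwapchainKHR above).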
3505 for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
3506 CreateSwapchainImageObject(device, pSwapchainImages[i], swapchain);
3507 }
3508 lock.unlock();
3509 }
3510 return result;
3511}
3512
3513VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3514 const VkGraphicsPipelineCreateInfo *pCreateInfos,
3515 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3516 bool skip_call = VK_FALSE;
3517 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003518 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003519 if (pCreateInfos) {
3520 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3521 if (pCreateInfos[idx0].basePipelineHandle) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003522 skip_call |= ValidateObject(device, pCreateInfos[idx0].basePipelineHandle,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003523 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3524 }
3525 if (pCreateInfos[idx0].layout) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003526 skip_call |= ValidateObject(device, pCreateInfos[idx0].layout,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003527 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3528 }
3529 if (pCreateInfos[idx0].pStages) {
3530 for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
3531 if (pCreateInfos[idx0].pStages[idx1].module) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003532 skip_call |= ValidateObject(device, pCreateInfos[idx0].pStages[idx1].module,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003533 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3534 }
3535 }
3536 }
3537 if (pCreateInfos[idx0].renderPass) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003538 skip_call |= ValidateObject(device, pCreateInfos[idx0].renderPass,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003539 VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
3540 }
3541 }
3542 }
3543 if (pipelineCache) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003544 skip_call |= ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003545 }
3546 lock.unlock();
3547 if (skip_call) {
3548 return VK_ERROR_VALIDATION_FAILED_EXT;
3549 }
3550 VkResult result = get_dispatch_table(ot_device_table_map, device)
3551 ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3552 lock.lock();
3553 if (result == VK_SUCCESS) {
3554 for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
Chris Forbesfeecd402016-09-29 14:53:50 +13003555 CreateObject(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003556 }
3557 }
3558 lock.unlock();
3559 return result;
3560}
3561
3562VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
3563 const VkComputePipelineCreateInfo *pCreateInfos,
3564 const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
3565 bool skip_call = VK_FALSE;
3566 std::unique_lock<std::mutex> lock(global_lock);
Chris Forbes2f271a72016-09-29 14:58:08 +13003567 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003568 if (pCreateInfos) {
3569 for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
3570 if (pCreateInfos[idx0].basePipelineHandle) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003571 skip_call |= ValidateObject(device, pCreateInfos[idx0].basePipelineHandle,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003572 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
3573 }
3574 if (pCreateInfos[idx0].layout) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003575 skip_call |= ValidateObject(device, pCreateInfos[idx0].layout,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003576 VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
3577 }
3578 if (pCreateInfos[idx0].stage.module) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003579 skip_call |= ValidateObject(device, pCreateInfos[idx0].stage.module,
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003580 VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
3581 }
3582 }
3583 }
3584 if (pipelineCache) {
Chris Forbes2f271a72016-09-29 14:58:08 +13003585 skip_call |= ValidateObject(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003586 }
3587 lock.unlock();
3588 if (skip_call) {
3589 return VK_ERROR_VALIDATION_FAILED_EXT;
3590 }
3591 VkResult result = get_dispatch_table(ot_device_table_map, device)
3592 ->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3593 lock.lock();
3594 if (result == VK_SUCCESS) {
3595 for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
Chris Forbesfeecd402016-09-29 14:53:50 +13003596 CreateObject(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pAllocator);
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003597 }
3598 }
3599 lock.unlock();
3600 return result;
3601}
3602
Mark Lobodzinski82db45e2016-09-28 12:45:29 -06003603// VK_EXT_debug_marker Extension
3604VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(VkDevice device, VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
3605 bool skip_call = VK_FALSE;
3606 std::unique_lock<std::mutex> lock(global_lock);
3607 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3608 lock.unlock();
3609 if (skip_call) {
3610 return VK_ERROR_VALIDATION_FAILED_EXT;
3611 }
3612 VkResult result = get_dispatch_table(ot_device_table_map, device)->DebugMarkerSetObjectTagEXT(device, pTagInfo);
3613 return result;
3614}
3615
3616VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(VkDevice device, VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
3617 bool skip_call = VK_FALSE;
3618 std::unique_lock<std::mutex> lock(global_lock);
3619 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3620 lock.unlock();
3621 if (skip_call) {
3622 return VK_ERROR_VALIDATION_FAILED_EXT;
3623 }
3624 VkResult result = get_dispatch_table(ot_device_table_map, device)->DebugMarkerSetObjectNameEXT(device, pNameInfo);
3625 return result;
3626}
3627
3628VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(VkCommandBuffer commandBuffer, VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
3629 bool skip_call = VK_FALSE;
3630 std::unique_lock<std::mutex> lock(global_lock);
3631 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3632 lock.unlock();
3633 if (!skip_call) {
3634 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
3635 }
3636}
3637
3638VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(VkCommandBuffer commandBuffer) {
3639 bool skip_call = VK_FALSE;
3640 std::unique_lock<std::mutex> lock(global_lock);
3641 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3642 lock.unlock();
3643 if (!skip_call) {
3644 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerEndEXT(commandBuffer);
3645 }
3646}
3647
3648VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(VkCommandBuffer commandBuffer, VkDebugMarkerMarkerInfoEXT *pMarkerInfo) {
3649 bool skip_call = VK_FALSE;
3650 std::unique_lock<std::mutex> lock(global_lock);
3651 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3652 lock.unlock();
3653 if (!skip_call) {
3654 get_dispatch_table(ot_device_table_map, commandBuffer)->CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
3655 }
3656}
3657
Mark Lobodzinski5fcb92b2016-09-28 12:48:56 -06003658// VK_NV_external_memory_capabilities Extension
3659VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceExternalImageFormatPropertiesNV(
3660 VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
3661 VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType,
3662 VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
3663
3664 bool skip_call = false;
3665 {
3666 std::lock_guard<std::mutex> lock(global_lock);
3667 skip_call |= ValidateObject(physicalDevice, physicalDevice, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, false);
3668 }
3669 if (skip_call) {
3670 return VK_ERROR_VALIDATION_FAILED_EXT;
3671 }
3672 VkResult result = get_dispatch_table(ot_instance_table_map, physicalDevice)
3673 ->GetPhysicalDeviceExternalImageFormatPropertiesNV(physicalDevice, format, type, tiling, usage, flags,
3674 externalHandleType, pExternalImageFormatProperties);
3675 return result;
3676}
3677
Mark Lobodzinski8cc72bd2016-09-28 12:53:27 -06003678#ifdef VK_USE_PLATFORM_WIN32_KHR
3679// VK_NV_external_memory_win32 Extension
3680VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory,
3681 VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE *pHandle) {
3682 bool skip_call = VK_FALSE;
3683 std::unique_lock<std::mutex> lock(global_lock);
3684 skip_call |= ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
3685 skip_call |= ValidateObject(device, memory, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, false);
3686 lock.unlock();
3687 if (skip_call) {
3688 return VK_ERROR_VALIDATION_FAILED_EXT;
3689 }
3690 VkResult result = get_dispatch_table(ot_device_table_map, device)->GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
3691 return result;
3692}
3693#endif // VK_USE_PLATFORM_WIN32_KHR
3694
Mark Lobodzinski4e0003a2016-09-28 12:58:00 -06003695// VK_AMD_draw_indirect_count Extension
3696VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3697 VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
3698 uint32_t stride) {
3699 bool skip_call = VK_FALSE;
3700 std::unique_lock<std::mutex> lock(global_lock);
3701 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3702 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
3703 lock.unlock();
3704 if (!skip_call) {
3705 get_dispatch_table(ot_device_table_map, commandBuffer)
3706 ->CmdDrawIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3707 }
3708}
3709
3710VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
3711 VkBuffer countBuffer, VkDeviceSize countBufferOffset,
3712 uint32_t maxDrawCount, uint32_t stride) {
3713 bool skip_call = VK_FALSE;
3714 std::unique_lock<std::mutex> lock(global_lock);
3715 skip_call |= ValidateObject(commandBuffer, commandBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, false);
3716 skip_call |= ValidateObject(commandBuffer, buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
3717 lock.unlock();
3718 if (!skip_call) {
3719 get_dispatch_table(ot_device_table_map, commandBuffer)
3720 ->CmdDrawIndexedIndirectCountAMD(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
3721 }
3722}
3723
3724
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003725static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
3726 if (!name || name[0] != 'v' || name[1] != 'k')
3727 return NULL;
3728
3729 name += 2;
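// Skip the "vk" prefix so the comparisons below use the bare core command names.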
3730 if (!strcmp(name, "GetDeviceProcAddr"))
3731 return (PFN_vkVoidFunction)GetDeviceProcAddr;
3732 if (!strcmp(name, "DestroyDevice"))
3733 return (PFN_vkVoidFunction)DestroyDevice;
3734 if (!strcmp(name, "GetDeviceQueue"))
3735 return (PFN_vkVoidFunction)GetDeviceQueue;
3736 if (!strcmp(name, "QueueSubmit"))
3737 return (PFN_vkVoidFunction)QueueSubmit;
3738 if (!strcmp(name, "QueueWaitIdle"))
3739 return (PFN_vkVoidFunction)QueueWaitIdle;
3740 if (!strcmp(name, "DeviceWaitIdle"))
3741 return (PFN_vkVoidFunction)DeviceWaitIdle;
3742 if (!strcmp(name, "AllocateMemory"))
3743 return (PFN_vkVoidFunction)AllocateMemory;
3744 if (!strcmp(name, "FreeMemory"))
3745 return (PFN_vkVoidFunction)FreeMemory;
3746 if (!strcmp(name, "MapMemory"))
3747 return (PFN_vkVoidFunction)MapMemory;
3748 if (!strcmp(name, "UnmapMemory"))
3749 return (PFN_vkVoidFunction)UnmapMemory;
3750 if (!strcmp(name, "FlushMappedMemoryRanges"))
3751 return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
3752 if (!strcmp(name, "InvalidateMappedMemoryRanges"))
3753 return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
3754 if (!strcmp(name, "GetDeviceMemoryCommitment"))
3755 return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
3756 if (!strcmp(name, "BindBufferMemory"))
3757 return (PFN_vkVoidFunction)BindBufferMemory;
3758 if (!strcmp(name, "BindImageMemory"))
3759 return (PFN_vkVoidFunction)BindImageMemory;
3760 if (!strcmp(name, "GetBufferMemoryRequirements"))
3761 return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
3762 if (!strcmp(name, "GetImageMemoryRequirements"))
3763 return (PFN_vkVoidFunction)GetImageMemoryRequirements;
3764 if (!strcmp(name, "GetImageSparseMemoryRequirements"))
3765 return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
3766 if (!strcmp(name, "QueueBindSparse"))
3767 return (PFN_vkVoidFunction)QueueBindSparse;
3768 if (!strcmp(name, "CreateFence"))
3769 return (PFN_vkVoidFunction)CreateFence;
3770 if (!strcmp(name, "DestroyFence"))
3771 return (PFN_vkVoidFunction)DestroyFence;
3772 if (!strcmp(name, "ResetFences"))
3773 return (PFN_vkVoidFunction)ResetFences;
3774 if (!strcmp(name, "GetFenceStatus"))
3775 return (PFN_vkVoidFunction)GetFenceStatus;
3776 if (!strcmp(name, "WaitForFences"))
3777 return (PFN_vkVoidFunction)WaitForFences;
3778 if (!strcmp(name, "CreateSemaphore"))
3779 return (PFN_vkVoidFunction)CreateSemaphore;
3780 if (!strcmp(name, "DestroySemaphore"))
3781 return (PFN_vkVoidFunction)DestroySemaphore;
3782 if (!strcmp(name, "CreateEvent"))
3783 return (PFN_vkVoidFunction)CreateEvent;
3784 if (!strcmp(name, "DestroyEvent"))
3785 return (PFN_vkVoidFunction)DestroyEvent;
3786 if (!strcmp(name, "GetEventStatus"))
3787 return (PFN_vkVoidFunction)GetEventStatus;
3788 if (!strcmp(name, "SetEvent"))
3789 return (PFN_vkVoidFunction)SetEvent;
3790 if (!strcmp(name, "ResetEvent"))
3791 return (PFN_vkVoidFunction)ResetEvent;
3792 if (!strcmp(name, "CreateQueryPool"))
3793 return (PFN_vkVoidFunction)CreateQueryPool;
3794 if (!strcmp(name, "DestroyQueryPool"))
3795 return (PFN_vkVoidFunction)DestroyQueryPool;
3796 if (!strcmp(name, "GetQueryPoolResults"))
3797 return (PFN_vkVoidFunction)GetQueryPoolResults;
3798 if (!strcmp(name, "CreateBuffer"))
3799 return (PFN_vkVoidFunction)CreateBuffer;
3800 if (!strcmp(name, "DestroyBuffer"))
3801 return (PFN_vkVoidFunction)DestroyBuffer;
3802 if (!strcmp(name, "CreateBufferView"))
3803 return (PFN_vkVoidFunction)CreateBufferView;
3804 if (!strcmp(name, "DestroyBufferView"))
3805 return (PFN_vkVoidFunction)DestroyBufferView;
3806 if (!strcmp(name, "CreateImage"))
3807 return (PFN_vkVoidFunction)CreateImage;
3808 if (!strcmp(name, "DestroyImage"))
3809 return (PFN_vkVoidFunction)DestroyImage;
3810 if (!strcmp(name, "GetImageSubresourceLayout"))
3811 return (PFN_vkVoidFunction)GetImageSubresourceLayout;
3812 if (!strcmp(name, "CreateImageView"))
3813 return (PFN_vkVoidFunction)CreateImageView;
3814 if (!strcmp(name, "DestroyImageView"))
3815 return (PFN_vkVoidFunction)DestroyImageView;
3816 if (!strcmp(name, "CreateShaderModule"))
3817 return (PFN_vkVoidFunction)CreateShaderModule;
3818 if (!strcmp(name, "DestroyShaderModule"))
3819 return (PFN_vkVoidFunction)DestroyShaderModule;
3820 if (!strcmp(name, "CreatePipelineCache"))
3821 return (PFN_vkVoidFunction)CreatePipelineCache;
3822 if (!strcmp(name, "DestroyPipelineCache"))
3823 return (PFN_vkVoidFunction)DestroyPipelineCache;
3824 if (!strcmp(name, "GetPipelineCacheData"))
3825 return (PFN_vkVoidFunction)GetPipelineCacheData;
3826 if (!strcmp(name, "MergePipelineCaches"))
3827 return (PFN_vkVoidFunction)MergePipelineCaches;
3828 if (!strcmp(name, "CreateGraphicsPipelines"))
3829 return (PFN_vkVoidFunction)CreateGraphicsPipelines;
3830 if (!strcmp(name, "CreateComputePipelines"))
3831 return (PFN_vkVoidFunction)CreateComputePipelines;
3832 if (!strcmp(name, "DestroyPipeline"))
3833 return (PFN_vkVoidFunction)DestroyPipeline;
3834 if (!strcmp(name, "CreatePipelineLayout"))
3835 return (PFN_vkVoidFunction)CreatePipelineLayout;
3836 if (!strcmp(name, "DestroyPipelineLayout"))
3837 return (PFN_vkVoidFunction)DestroyPipelineLayout;
3838 if (!strcmp(name, "CreateSampler"))
3839 return (PFN_vkVoidFunction)CreateSampler;
3840 if (!strcmp(name, "DestroySampler"))
3841 return (PFN_vkVoidFunction)DestroySampler;
3842 if (!strcmp(name, "CreateDescriptorSetLayout"))
3843 return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
3844 if (!strcmp(name, "DestroyDescriptorSetLayout"))
3845 return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
3846 if (!strcmp(name, "CreateDescriptorPool"))
3847 return (PFN_vkVoidFunction)CreateDescriptorPool;
3848 if (!strcmp(name, "DestroyDescriptorPool"))
3849 return (PFN_vkVoidFunction)DestroyDescriptorPool;
3850 if (!strcmp(name, "ResetDescriptorPool"))
3851 return (PFN_vkVoidFunction)ResetDescriptorPool;
3852 if (!strcmp(name, "AllocateDescriptorSets"))
3853 return (PFN_vkVoidFunction)AllocateDescriptorSets;
3854 if (!strcmp(name, "FreeDescriptorSets"))
3855 return (PFN_vkVoidFunction)FreeDescriptorSets;
3856 if (!strcmp(name, "UpdateDescriptorSets"))
3857 return (PFN_vkVoidFunction)UpdateDescriptorSets;
3858 if (!strcmp(name, "CreateFramebuffer"))
3859 return (PFN_vkVoidFunction)CreateFramebuffer;
3860 if (!strcmp(name, "DestroyFramebuffer"))
3861 return (PFN_vkVoidFunction)DestroyFramebuffer;
3862 if (!strcmp(name, "CreateRenderPass"))
3863 return (PFN_vkVoidFunction)CreateRenderPass;
3864 if (!strcmp(name, "DestroyRenderPass"))
3865 return (PFN_vkVoidFunction)DestroyRenderPass;
3866 if (!strcmp(name, "GetRenderAreaGranularity"))
3867 return (PFN_vkVoidFunction)GetRenderAreaGranularity;
3868 if (!strcmp(name, "CreateCommandPool"))
3869 return (PFN_vkVoidFunction)CreateCommandPool;
3870 if (!strcmp(name, "DestroyCommandPool"))
3871 return (PFN_vkVoidFunction)DestroyCommandPool;
3872 if (!strcmp(name, "ResetCommandPool"))
3873 return (PFN_vkVoidFunction)ResetCommandPool;
3874 if (!strcmp(name, "AllocateCommandBuffers"))
3875 return (PFN_vkVoidFunction)AllocateCommandBuffers;
3876 if (!strcmp(name, "FreeCommandBuffers"))
3877 return (PFN_vkVoidFunction)FreeCommandBuffers;
3878 if (!strcmp(name, "BeginCommandBuffer"))
3879 return (PFN_vkVoidFunction)BeginCommandBuffer;
3880 if (!strcmp(name, "EndCommandBuffer"))
3881 return (PFN_vkVoidFunction)EndCommandBuffer;
3882 if (!strcmp(name, "ResetCommandBuffer"))
3883 return (PFN_vkVoidFunction)ResetCommandBuffer;
3884 if (!strcmp(name, "CmdBindPipeline"))
3885 return (PFN_vkVoidFunction)CmdBindPipeline;
3886 if (!strcmp(name, "CmdSetViewport"))
3887 return (PFN_vkVoidFunction)CmdSetViewport;
3888 if (!strcmp(name, "CmdSetScissor"))
3889 return (PFN_vkVoidFunction)CmdSetScissor;
3890 if (!strcmp(name, "CmdSetLineWidth"))
3891 return (PFN_vkVoidFunction)CmdSetLineWidth;
3892 if (!strcmp(name, "CmdSetDepthBias"))
3893 return (PFN_vkVoidFunction)CmdSetDepthBias;
3894 if (!strcmp(name, "CmdSetBlendConstants"))
3895 return (PFN_vkVoidFunction)CmdSetBlendConstants;
3896 if (!strcmp(name, "CmdSetDepthBounds"))
3897 return (PFN_vkVoidFunction)CmdSetDepthBounds;
3898 if (!strcmp(name, "CmdSetStencilCompareMask"))
3899 return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
3900 if (!strcmp(name, "CmdSetStencilWriteMask"))
3901 return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
3902 if (!strcmp(name, "CmdSetStencilReference"))
3903 return (PFN_vkVoidFunction)CmdSetStencilReference;
3904 if (!strcmp(name, "CmdBindDescriptorSets"))
3905 return (PFN_vkVoidFunction)CmdBindDescriptorSets;
3906 if (!strcmp(name, "CmdBindIndexBuffer"))
3907 return (PFN_vkVoidFunction)CmdBindIndexBuffer;
3908 if (!strcmp(name, "CmdBindVertexBuffers"))
3909 return (PFN_vkVoidFunction)CmdBindVertexBuffers;
3910 if (!strcmp(name, "CmdDraw"))
3911 return (PFN_vkVoidFunction)CmdDraw;
3912 if (!strcmp(name, "CmdDrawIndexed"))
3913 return (PFN_vkVoidFunction)CmdDrawIndexed;
3914 if (!strcmp(name, "CmdDrawIndirect"))
3915 return (PFN_vkVoidFunction)CmdDrawIndirect;
3916 if (!strcmp(name, "CmdDrawIndexedIndirect"))
3917 return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
3918 if (!strcmp(name, "CmdDispatch"))
3919 return (PFN_vkVoidFunction)CmdDispatch;
3920 if (!strcmp(name, "CmdDispatchIndirect"))
3921 return (PFN_vkVoidFunction)CmdDispatchIndirect;
3922 if (!strcmp(name, "CmdCopyBuffer"))
3923 return (PFN_vkVoidFunction)CmdCopyBuffer;
3924 if (!strcmp(name, "CmdCopyImage"))
3925 return (PFN_vkVoidFunction)CmdCopyImage;
3926 if (!strcmp(name, "CmdBlitImage"))
3927 return (PFN_vkVoidFunction)CmdBlitImage;
3928 if (!strcmp(name, "CmdCopyBufferToImage"))
3929 return (PFN_vkVoidFunction)CmdCopyBufferToImage;
3930 if (!strcmp(name, "CmdCopyImageToBuffer"))
3931 return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
3932 if (!strcmp(name, "CmdUpdateBuffer"))
3933 return (PFN_vkVoidFunction)CmdUpdateBuffer;
3934 if (!strcmp(name, "CmdFillBuffer"))
3935 return (PFN_vkVoidFunction)CmdFillBuffer;
3936 if (!strcmp(name, "CmdClearColorImage"))
3937 return (PFN_vkVoidFunction)CmdClearColorImage;
3938 if (!strcmp(name, "CmdClearDepthStencilImage"))
3939 return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
3940 if (!strcmp(name, "CmdClearAttachments"))
3941 return (PFN_vkVoidFunction)CmdClearAttachments;
3942 if (!strcmp(name, "CmdResolveImage"))
3943 return (PFN_vkVoidFunction)CmdResolveImage;
3944 if (!strcmp(name, "CmdSetEvent"))
3945 return (PFN_vkVoidFunction)CmdSetEvent;
3946 if (!strcmp(name, "CmdResetEvent"))
3947 return (PFN_vkVoidFunction)CmdResetEvent;
3948 if (!strcmp(name, "CmdWaitEvents"))
3949 return (PFN_vkVoidFunction)CmdWaitEvents;
3950 if (!strcmp(name, "CmdPipelineBarrier"))
3951 return (PFN_vkVoidFunction)CmdPipelineBarrier;
3952 if (!strcmp(name, "CmdBeginQuery"))
3953 return (PFN_vkVoidFunction)CmdBeginQuery;
3954 if (!strcmp(name, "CmdEndQuery"))
3955 return (PFN_vkVoidFunction)CmdEndQuery;
3956 if (!strcmp(name, "CmdResetQueryPool"))
3957 return (PFN_vkVoidFunction)CmdResetQueryPool;
3958 if (!strcmp(name, "CmdWriteTimestamp"))
3959 return (PFN_vkVoidFunction)CmdWriteTimestamp;
3960 if (!strcmp(name, "CmdCopyQueryPoolResults"))
3961 return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
3962 if (!strcmp(name, "CmdPushConstants"))
3963 return (PFN_vkVoidFunction)CmdPushConstants;
3964 if (!strcmp(name, "CmdBeginRenderPass"))
3965 return (PFN_vkVoidFunction)CmdBeginRenderPass;
3966 if (!strcmp(name, "CmdNextSubpass"))
3967 return (PFN_vkVoidFunction)CmdNextSubpass;
3968 if (!strcmp(name, "CmdEndRenderPass"))
3969 return (PFN_vkVoidFunction)CmdEndRenderPass;
3970 if (!strcmp(name, "CmdExecuteCommands"))
3971 return (PFN_vkVoidFunction)CmdExecuteCommands;
Mark Lobodzinski82db45e2016-09-28 12:45:29 -06003972 if (!strcmp(name, "DebugMarkerSetObjectTagEXT"))
3973 return (PFN_vkVoidFunction)DebugMarkerSetObjectTagEXT;
3974 if (!strcmp(name, "DebugMarkerSetObjectNameEXT"))
3975 return (PFN_vkVoidFunction)DebugMarkerSetObjectNameEXT;
3976 if (!strcmp(name, "CmdDebugMarkerBeginEXT"))
3977 return (PFN_vkVoidFunction)CmdDebugMarkerBeginEXT;
3978 if (!strcmp(name, "CmdDebugMarkerEndEXT"))
3979 return (PFN_vkVoidFunction)CmdDebugMarkerEndEXT;
3980 if (!strcmp(name, "CmdDebugMarkerInsertEXT"))
3981 return (PFN_vkVoidFunction)CmdDebugMarkerInsertEXT;
Mark Lobodzinski8cc72bd2016-09-28 12:53:27 -06003982#ifdef VK_USE_PLATFORM_WIN32_KHR
3983 if (!strcmp(name, "GetMemoryWin32HandleNV"))
3984 return (PFN_vkVoidFunction)GetMemoryWin32HandleNV;
3985#endif // VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski4e0003a2016-09-28 12:58:00 -06003986 if (!strcmp(name, "CmdDrawIndirectCountAMD"))
3987 return (PFN_vkVoidFunction)CmdDrawIndirectCountAMD;
3988 if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD"))
3989 return (PFN_vkVoidFunction)CmdDrawIndexedIndirectCountAMD;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06003990
3991 return NULL;
3992}
3993static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
3994 if (!name || name[0] != 'v' || name[1] != 'k')
3995 return NULL;
3996
3997 name += 2;
3998 if (!strcmp(name, "CreateInstance"))
3999 return (PFN_vkVoidFunction)CreateInstance;
4000 if (!strcmp(name, "DestroyInstance"))
4001 return (PFN_vkVoidFunction)DestroyInstance;
4002 if (!strcmp(name, "EnumeratePhysicalDevices"))
4003 return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
4004 if (!strcmp(name, "GetPhysicalDeviceFeatures"))
4005 return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
4006 if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
4007 return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
4008 if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
4009 return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
4010 if (!strcmp(name, "GetPhysicalDeviceProperties"))
4011 return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
4012 if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
4013 return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
4014 if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
4015 return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
4016 if (!strcmp(name, "GetInstanceProcAddr"))
4017 return (PFN_vkVoidFunction)GetInstanceProcAddr;
4018 if (!strcmp(name, "CreateDevice"))
4019 return (PFN_vkVoidFunction)CreateDevice;
4020 if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
4021 return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
4022 if (!strcmp(name, "EnumerateInstanceLayerProperties"))
4023 return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
4024 if (!strcmp(name, "EnumerateDeviceLayerProperties"))
4025 return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
4026 if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
4027 return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
Mark Lobodzinski5fcb92b2016-09-28 12:48:56 -06004028 if (!strcmp(name, "GetPhysicalDeviceExternalImageFormatPropertiesNV"))
4029 return (PFN_vkVoidFunction)GetPhysicalDeviceExternalImageFormatPropertiesNV;
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06004030
4031 return NULL;
4032}
4033
4034static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkDevice device) {
4035 if (device) {
4036 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
Mark Youngead9b932016-09-08 12:28:38 -06004037
4038 if (device_data->wsi_enabled) {
4039 if (!strcmp("vkCreateSwapchainKHR", name))
4040 return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
4041 if (!strcmp("vkDestroySwapchainKHR", name))
4042 return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
4043 if (!strcmp("vkGetSwapchainImagesKHR", name))
4044 return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
4045 if (!strcmp("vkAcquireNextImageKHR", name))
4046 return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
4047 if (!strcmp("vkQueuePresentKHR", name))
4048 return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
4049 }
4050
4051 if (device_data->wsi_display_swapchain_enabled) {
4052 if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
4053 return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
4054 }
4055 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06004056 }
Mark Lobodzinski9bab8662016-07-01 10:53:31 -06004057
4058 return nullptr;
4059}
4060
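// Resolution order for device-level commands: anything this layer intercepts is returned
// directly; everything else is forwarded to the next layer's GetDeviceProcAddr so that
// unintercepted commands still reach the driver.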
4061VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
4062 PFN_vkVoidFunction addr;
4063 addr = InterceptCoreDeviceCommand(funcName);
4064 if (addr) {
4065 return addr;
4066 }
4067 assert(device);
4068
4069 addr = InterceptWsiEnabledCommand(funcName, device);
4070 if (addr) {
4071 return addr;
4072 }
4073 if (get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr == NULL) {
4074 return NULL;
4075 }
4076 return get_dispatch_table(ot_device_table_map, device)->GetDeviceProcAddr(device, funcName);
4077}
4078
4079VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
4080 PFN_vkVoidFunction addr;
4081 addr = InterceptCoreInstanceCommand(funcName);
4082 if (!addr) {
4083 addr = InterceptCoreDeviceCommand(funcName);
4084 }
4085 if (!addr) {
4086 addr = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
4087 }
4088 if (addr) {
4089 return addr;
4090 }
4091 assert(instance);
4092
4093 addr = InterceptMsgCallbackGetProcAddrCommand(funcName, instance);
4094 if (addr) {
4095 return addr;
4096 }
4097 addr = InterceptWsiEnabledCommand(funcName, instance);
4098 if (addr) {
4099 return addr;
4100 }
4101 if (get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr == NULL) {
4102 return NULL;
4103 }
4104 return get_dispatch_table(ot_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
4105}
4106
4107} // namespace object_tracker
4108
4109// vk_layer_logging.h expects these to be defined
4110VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
4111 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
4112 const VkAllocationCallbacks *pAllocator,
4113 VkDebugReportCallbackEXT *pMsgCallback) {
4114 return object_tracker::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
4115}
4116
4117VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
4118 const VkAllocationCallbacks *pAllocator) {
4119 object_tracker::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
4120}
4121
4122VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
4123 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
4124 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
4125 object_tracker::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
4126}
4127
4128// Loader-layer interface v0, just wrappers since there is only a layer
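// These undecorated vk* exports are what the loader resolves directly from the layer
// library under interface version 0; each simply forwards to the corresponding
// object_tracker implementation above.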
4129VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
4130 VkExtensionProperties *pProperties) {
4131 return object_tracker::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
4132}
4133
4134VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
4135 VkLayerProperties *pProperties) {
4136 return object_tracker::EnumerateInstanceLayerProperties(pCount, pProperties);
4137}
4138
4139VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
4140 VkLayerProperties *pProperties) {
4141 // The layer command handles VK_NULL_HANDLE just fine internally
4142 assert(physicalDevice == VK_NULL_HANDLE);
4143 return object_tracker::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
4144}
4145
4146VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
4147 return object_tracker::GetDeviceProcAddr(dev, funcName);
4148}
4149
4150VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
4151 return object_tracker::GetInstanceProcAddr(instance, funcName);
4152}
4153
4154VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
4155 const char *pLayerName, uint32_t *pCount,
4156 VkExtensionProperties *pProperties) {
4157 // The layer command handles VK_NULL_HANDLE just fine internally
4158 assert(physicalDevice == VK_NULL_HANDLE);
4159 return object_tracker::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
Mark Lobodzinski38080682016-07-22 15:30:27 -06004160}