blob: 0d81c80963540fbe563f077cfb41560a94bb6b6d [file] [log] [blame]
Mark Lobodzinskidc3bd852016-09-06 16:12:23 -06001/*
2 * Copyright (c) 2015-2016 The Khronos Group Inc.
3 * Copyright (c) 2015-2016 Valve Corporation
4 * Copyright (c) 2015-2016 LunarG, Inc.
5 * Copyright (c) 2015-2016 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 * Author: Tobin Ehlis <tobine@google.com>
20 * Author: Mark Lobodzinski <mark@lunarg.com>
21 */
22
23#include <stdio.h>
24#include <stdlib.h>
25#include <string.h>
26#include <unordered_map>
27#include <vector>
28#include <list>
29#include <memory>
30
31#include "vk_loader_platform.h"
32#include "vulkan/vk_layer.h"
33#include "vk_layer_config.h"
34#include "vk_layer_extension_utils.h"
35#include "vk_layer_utils.h"
36#include "vk_layer_table.h"
37#include "vk_layer_logging.h"
38#include "unique_objects.h"
39#include "vk_dispatch_table_helper.h"
40#include "vk_struct_string_helper_cpp.h"
41#include "vk_layer_data.h"
42#include "vk_layer_utils.h"
43
Mike Stroyanb985fca2016-11-01 11:50:16 -060044// This intentionally includes a cpp file
45#include "vk_safe_struct.cpp"
46
Mark Lobodzinskidc3bd852016-09-06 16:12:23 -060047#include "unique_objects_wrappers.h"
48
49namespace unique_objects {
50
51static void initUniqueObjects(layer_data *instance_data, const VkAllocationCallbacks *pAllocator) {
52 layer_debug_actions(instance_data->report_data, instance_data->logging_callback, pAllocator, "google_unique_objects");
53}
54
55// Handle CreateInstance Extensions
56static void checkInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
57 uint32_t i;
58 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
59 VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
60 instance_ext_map[disp_table] = {};
61
62 for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
63 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
64 instance_ext_map[disp_table].wsi_enabled = true;
65 }
66 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
67 instance_ext_map[disp_table].display_enabled = true;
68 }
69#ifdef VK_USE_PLATFORM_XLIB_KHR
70 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
71 instance_ext_map[disp_table].xlib_enabled = true;
72 }
73#endif
74#ifdef VK_USE_PLATFORM_XCB_KHR
75 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
76 instance_ext_map[disp_table].xcb_enabled = true;
77 }
78#endif
79#ifdef VK_USE_PLATFORM_WAYLAND_KHR
80 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
81 instance_ext_map[disp_table].wayland_enabled = true;
82 }
83#endif
84#ifdef VK_USE_PLATFORM_MIR_KHR
85 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
86 instance_ext_map[disp_table].mir_enabled = true;
87 }
88#endif
89#ifdef VK_USE_PLATFORM_ANDROID_KHR
90 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
91 instance_ext_map[disp_table].android_enabled = true;
92 }
93#endif
94#ifdef VK_USE_PLATFORM_WIN32_KHR
95 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
96 instance_ext_map[disp_table].win32_enabled = true;
97 }
98#endif
99
100 // Check for recognized instance extensions
101 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
102 if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedInstanceExtensions)) {
103 log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
104 0, "UniqueObjects",
105 "Instance Extension %s is not supported by this layer. Using this extension may adversely affect "
106 "validation results and/or produce undefined behavior.",
107 pCreateInfo->ppEnabledExtensionNames[i]);
108 }
109 }
110}
111
112// Handle CreateDevice Extensions
113static void createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
114 layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
115 VkLayerDispatchTable *disp_table = device_data->device_dispatch_table;
116 PFN_vkGetDeviceProcAddr gpa = disp_table->GetDeviceProcAddr;
117
118 device_data->device_dispatch_table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
119 disp_table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
120 disp_table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
121 disp_table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
122 disp_table->QueuePresentKHR = (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
123 device_data->wsi_enabled = false;
124
125 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
126 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
127 device_data->wsi_enabled = true;
128 }
129 // Check for recognized device extensions
130 if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedDeviceExtensions)) {
131 log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
132 0, "UniqueObjects",
133 "Device Extension %s is not supported by this layer. Using this extension may adversely affect "
134 "validation results and/or produce undefined behavior.",
135 pCreateInfo->ppEnabledExtensionNames[i]);
136 }
137 }
138}
139
140VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
141 VkInstance *pInstance) {
142 VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
143
144 assert(chain_info->u.pLayerInfo);
145 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
146 PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
147 if (fpCreateInstance == NULL) {
148 return VK_ERROR_INITIALIZATION_FAILED;
149 }
150
151 // Advance the link info for the next element on the chain
152 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
153
154 VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
155 if (result != VK_SUCCESS) {
156 return result;
157 }
158
159 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
160 instance_data->instance = *pInstance;
161 instance_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
162 layer_init_instance_dispatch_table(*pInstance, instance_data->instance_dispatch_table, fpGetInstanceProcAddr);
163
164 instance_data->instance = *pInstance;
165 instance_data->report_data =
166 debug_report_create_instance(instance_data->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
167 pCreateInfo->ppEnabledExtensionNames);
168
169 // Set up temporary debug callbacks to output messages at CreateInstance-time
170 if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
171 &instance_data->tmp_callbacks)) {
172 if (instance_data->num_tmp_callbacks > 0) {
173 if (layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
174 instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
175 layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
176 instance_data->num_tmp_callbacks = 0;
177 }
178 }
179 }
180
181 initUniqueObjects(instance_data, pAllocator);
182 checkInstanceRegisterExtensions(pCreateInfo, *pInstance);
183
184 // Disable and free tmp callbacks, no longer necessary
185 if (instance_data->num_tmp_callbacks > 0) {
186 layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
187 layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
188 instance_data->num_tmp_callbacks = 0;
189 }
190
191 return result;
192}
193
194VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
195 dispatch_key key = get_dispatch_key(instance);
196 layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
197 VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
198 instance_ext_map.erase(disp_table);
199 disp_table->DestroyInstance(instance, pAllocator);
200
201 // Clean up logging callback, if any
202 while (instance_data->logging_callback.size() > 0) {
203 VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
204 layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
205 instance_data->logging_callback.pop_back();
206 }
207
208 layer_debug_report_destroy_instance(instance_data->report_data);
209 layer_data_map.erase(key);
210}
211
212VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
213 const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
214 layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
215 VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
216
217 assert(chain_info->u.pLayerInfo);
218 PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
219 PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
220 PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
221 if (fpCreateDevice == NULL) {
222 return VK_ERROR_INITIALIZATION_FAILED;
223 }
224
225 // Advance the link info for the next element on the chain
226 chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
227
228 VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
229 if (result != VK_SUCCESS) {
230 return result;
231 }
232
233 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
234 my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
235
236 // Setup layer's device dispatch table
237 my_device_data->device_dispatch_table = new VkLayerDispatchTable;
238 layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
239
240 createDeviceRegisterExtensions(pCreateInfo, *pDevice);
241 // Set gpu for this device in order to get at any objects mapped at instance level
242
243 my_device_data->gpu = gpu;
244
245 return result;
246}
247
248VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
249 dispatch_key key = get_dispatch_key(device);
250 layer_data *dev_data = get_my_data_ptr(key, layer_data_map);
251
252 layer_debug_report_destroy_device(device);
253 dev_data->device_dispatch_table->DestroyDevice(device, pAllocator);
254 layer_data_map.erase(key);
255}
256
// Properties this layer reports from the Enumerate*LayerProperties entry points.
static const VkLayerProperties globalLayerProps = {"VK_LAYER_GOOGLE_unique_objects",
                                                   VK_LAYER_API_VERSION, // specVersion
                                                   1,                    // implementationVersion
                                                   "Google Validation Layer"};
261
262static inline PFN_vkVoidFunction layer_intercept_proc(const char *name) {
263 for (int i = 0; i < sizeof(procmap) / sizeof(procmap[0]); i++) {
264 if (!strcmp(name, procmap[i].name))
265 return procmap[i].pFunc;
266 }
267 return NULL;
268}
269
270VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
271 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
272}
273
274VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
275 VkLayerProperties *pProperties) {
276 return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
277}
278
279VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
280 VkExtensionProperties *pProperties) {
281 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
282 return util_GetExtensionProperties(0, NULL, pCount, pProperties);
283
284 return VK_ERROR_LAYER_NOT_PRESENT;
285}
286
287VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
288 uint32_t *pCount, VkExtensionProperties *pProperties) {
289 if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
290 return util_GetExtensionProperties(0, nullptr, pCount, pProperties);
291
292 assert(physicalDevice);
293
294 dispatch_key key = get_dispatch_key(physicalDevice);
295 layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
296 return instance_data->instance_dispatch_table->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
297}
298
299VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
300 PFN_vkVoidFunction addr;
301 assert(device);
302 addr = layer_intercept_proc(funcName);
303 if (addr) {
304 return addr;
305 }
306
307 layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
308 VkLayerDispatchTable *disp_table = dev_data->device_dispatch_table;
309 if (disp_table->GetDeviceProcAddr == NULL) {
310 return NULL;
311 }
312 return disp_table->GetDeviceProcAddr(device, funcName);
313}
314
315VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
316 PFN_vkVoidFunction addr;
317
318 addr = layer_intercept_proc(funcName);
319 if (addr) {
320 return addr;
321 }
322 assert(instance);
323
324 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
325 addr = debug_report_get_instance_proc_addr(instance_data->report_data, funcName);
326 if (addr) {
327 return addr;
328 }
329
330 VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
331 if (disp_table->GetInstanceProcAddr == NULL) {
332 return NULL;
333 }
334 return disp_table->GetInstanceProcAddr(instance, funcName);
335}
336
// Allocates device memory. If the pNext chain carries a
// VkDedicatedAllocationMemoryAllocateInfoNV, a deep copy of the allocate info is made
// and the dedicated struct's buffer/image handles are translated from this layer's
// unique ids back to driver handles before the call is passed down. On success the
// returned memory handle is replaced with a fresh unique id.
VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    const VkMemoryAllocateInfo *input_allocate_info = pAllocateInfo;
    std::unique_ptr<safe_VkMemoryAllocateInfo> safe_allocate_info;
    std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV> safe_dedicated_allocate_info;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if ((pAllocateInfo != nullptr) &&
        ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
        // Assuming there is only one extension struct of this type in the list for now
        safe_dedicated_allocate_info =
            std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV>(new safe_VkDedicatedAllocationMemoryAllocateInfoNV);
        safe_allocate_info = std::unique_ptr<safe_VkMemoryAllocateInfo>(new safe_VkMemoryAllocateInfo(pAllocateInfo));
        // Pass the deep copy down the chain instead of the app's struct
        input_allocate_info = reinterpret_cast<const VkMemoryAllocateInfo *>(safe_allocate_info.get());

        // Walk the app's original pNext chain looking for the dedicated-allocation struct
        const GenericHeader *orig_pnext = reinterpret_cast<const GenericHeader *>(pAllocateInfo->pNext);
        GenericHeader *input_pnext = reinterpret_cast<GenericHeader *>(safe_allocate_info.get());
        while (orig_pnext != nullptr) {
            if (orig_pnext->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV) {
                safe_dedicated_allocate_info->initialize(
                    reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(orig_pnext));

                // unique_id_mapping access must be serialized
                std::unique_lock<std::mutex> lock(global_lock);

                // Replace the wrapped (unique-id) handles with the real driver handles
                if (safe_dedicated_allocate_info->buffer != VK_NULL_HANDLE) {
                    uint64_t local_buffer = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->buffer);
                    safe_dedicated_allocate_info->buffer =
                        reinterpret_cast<VkBuffer &>(device_data->unique_id_mapping[local_buffer]);
                }

                if (safe_dedicated_allocate_info->image != VK_NULL_HANDLE) {
                    uint64_t local_image = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->image);
                    safe_dedicated_allocate_info->image = reinterpret_cast<VkImage &>(device_data->unique_id_mapping[local_image]);
                }

                lock.unlock();

                // Splice the unwrapped copy into the deep-copied struct's pNext chain
                input_pnext->pNext = reinterpret_cast<GenericHeader *>(safe_dedicated_allocate_info.get());
                input_pnext = reinterpret_cast<GenericHeader *>(input_pnext->pNext);
            } else {
                // TODO: generic handling of pNext copies
            }

            orig_pnext = reinterpret_cast<const GenericHeader *>(orig_pnext->pNext);
        }
    }

    VkResult result = device_data->device_dispatch_table->AllocateMemory(device, input_allocate_info, pAllocator, pMemory);

    if (VK_SUCCESS == result) {
        // Hand the app a fresh unique id in place of the driver's memory handle
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pMemory);
        *pMemory = reinterpret_cast<VkDeviceMemory &>(unique_id);
    }

    return result;
}
395
// Creates compute pipelines. Each create info is deep-copied and its wrapped handles
// (basePipelineHandle, layout, stage.module) plus the pipeline cache are translated
// from unique ids back to driver handles before the call is passed down; on success
// the returned pipeline handles are replaced with fresh unique ids.
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                      const VkComputePipelineCreateInfo *pCreateInfos,
                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        std::lock_guard<std::mutex> lock(global_lock);
        // Deep copy so the app's structs are never modified
        local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].stage.module) {
                local_pCreateInfos[idx0].stage.module =
                    (VkShaderModule)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].stage.module)];
            }
        }
    }
    if (pipelineCache) {
        // Unwrap the cache handle as well (parameter is by-value, safe to overwrite)
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateComputePipelines(
        device, pipelineCache, createInfoCount, (const VkComputePipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    if (VK_SUCCESS == result) {
        // Wrap each returned pipeline handle in a fresh unique id
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            unique_id = global_unique_id++;
            my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
            pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
        }
    }
    return result;
}
442
// Creates graphics pipelines. Each create info is deep-copied and its wrapped handles
// (basePipelineHandle, layout, every stage's module, renderPass) plus the pipeline
// cache are translated from unique ids back to driver handles before the call is
// passed down; on success the returned pipeline handles get fresh unique ids.
VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        // Deep copy so the app's structs are never modified
        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].pStages) {
                // Unwrap the shader module of every shader stage
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        local_pCreateInfos[idx0].pStages[idx1].module =
                            (VkShaderModule)my_device_data
                                ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].pStages[idx1].module)];
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                local_pCreateInfos[idx0].renderPass =
                    (VkRenderPass)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].renderPass)];
            }
        }
    }
    if (pipelineCache) {
        // Unwrap the cache handle as well (parameter is by-value, safe to overwrite)
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateGraphicsPipelines(
        device, pipelineCache, createInfoCount, (const VkGraphicsPipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    if (VK_SUCCESS == result) {
        // Wrap each returned pipeline handle in a fresh unique id
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            unique_id = global_unique_id++;
            my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
            pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
        }
    }
    return result;
}
498
499VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
500 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
501 const VkAllocationCallbacks *pAllocator,
502 VkDebugReportCallbackEXT *pMsgCallback) {
503 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
504 VkResult result =
505 instance_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
506
507 if (VK_SUCCESS == result) {
508 result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pMsgCallback);
509 }
510 return result;
511}
512
513VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
514 const VkAllocationCallbacks *pAllocator) {
515 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
516 instance_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
517 layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
518}
519
520VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
521 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
522 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
523 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
524 instance_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix,
525 pMsg);
526}
527
528VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
529 const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
530 layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
531 safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
532 if (pCreateInfo) {
533 std::lock_guard<std::mutex> lock(global_lock);
534 local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
535 local_pCreateInfo->oldSwapchain =
536 (VkSwapchainKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->oldSwapchain)];
537 // Need to pull surface mapping from the instance-level map
538 layer_data *instance_data = get_my_data_ptr(get_dispatch_key(my_map_data->gpu), layer_data_map);
539 local_pCreateInfo->surface =
540 (VkSurfaceKHR)instance_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->surface)];
541 }
542
543 VkResult result = my_map_data->device_dispatch_table->CreateSwapchainKHR(
544 device, (const VkSwapchainCreateInfoKHR *)local_pCreateInfo, pAllocator, pSwapchain);
545 if (local_pCreateInfo) {
546 delete local_pCreateInfo;
547 }
548 if (VK_SUCCESS == result) {
549 std::lock_guard<std::mutex> lock(global_lock);
550 uint64_t unique_id = global_unique_id++;
551 my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSwapchain);
552 *pSwapchain = reinterpret_cast<VkSwapchainKHR &>(unique_id);
553 }
554 return result;
555}
556
// Queries swapchain images: the wrapped swapchain handle is unwrapped before the call
// is passed down, and each returned image handle is replaced with a fresh unique id.
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    if (VK_NULL_HANDLE != swapchain) {
        // Parameter is by-value, so overwriting it in place is safe
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = (VkSwapchainKHR)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];
    }
    VkResult result =
        my_device_data->device_dispatch_table->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    // TODO : Need to add corresponding code to delete these images
    if (VK_SUCCESS == result) {
        // Second-phase call (pSwapchainImages non-null): wrap every returned image
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            uint64_t unique_id = 0;
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pSwapchainImages[i]);
                pSwapchainImages[i] = reinterpret_cast<VkImage &>(unique_id);
            }
        }
    }
    return result;
}
580
581#ifndef __ANDROID__
582VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
583 VkDisplayPropertiesKHR *pProperties) {
584 layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
585 safe_VkDisplayPropertiesKHR *local_pProperties = NULL;
586 {
587 std::lock_guard<std::mutex> lock(global_lock);
588 if (pProperties) {
589 local_pProperties = new safe_VkDisplayPropertiesKHR[*pPropertyCount];
590 for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
591 local_pProperties[idx0].initialize(&pProperties[idx0]);
592 if (pProperties[idx0].display) {
593 local_pProperties[idx0].display =
594 (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pProperties[idx0].display)];
595 }
596 }
597 }
598 }
599
600 VkResult result = my_map_data->instance_dispatch_table->GetPhysicalDeviceDisplayPropertiesKHR(
601 physicalDevice, pPropertyCount, (VkDisplayPropertiesKHR *)local_pProperties);
602 if (result == VK_SUCCESS && pProperties) {
603 for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
604 std::lock_guard<std::mutex> lock(global_lock);
605
606 uint64_t unique_id = global_unique_id++;
607 my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].display);
608 pProperties[idx0].display = reinterpret_cast<VkDisplayKHR &>(unique_id);
609 pProperties[idx0].displayName = local_pProperties[idx0].displayName;
610 pProperties[idx0].physicalDimensions = local_pProperties[idx0].physicalDimensions;
611 pProperties[idx0].physicalResolution = local_pProperties[idx0].physicalResolution;
612 pProperties[idx0].supportedTransforms = local_pProperties[idx0].supportedTransforms;
613 pProperties[idx0].planeReorderPossible = local_pProperties[idx0].planeReorderPossible;
614 pProperties[idx0].persistentContent = local_pProperties[idx0].persistentContent;
615 }
616 }
617 if (local_pProperties) {
618 delete[] local_pProperties;
619 }
620 return result;
621}
622
623VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
624 uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
625 layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
626 VkResult result = my_map_data->instance_dispatch_table->GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
627 pDisplayCount, pDisplays);
628 if (VK_SUCCESS == result) {
629 if ((*pDisplayCount > 0) && pDisplays) {
630 std::lock_guard<std::mutex> lock(global_lock);
631 for (uint32_t i = 0; i < *pDisplayCount; i++) {
632 auto it = my_map_data->unique_id_mapping.find(reinterpret_cast<const uint64_t &>(pDisplays[i]));
633 assert(it != my_map_data->unique_id_mapping.end());
634 pDisplays[i] = reinterpret_cast<VkDisplayKHR &>(it->second);
635 }
636 }
637 }
638 return result;
639}
640
// Queries display-mode properties: the wrapped display handle is unwrapped before the
// call, and on success each returned displayMode is wrapped in a fresh unique id while
// the parameter fields are copied back into the app's array.
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
                                                           uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayModePropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        // display parameter is by-value, so it can be unwrapped in place
        display = (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<uint64_t &>(display)];
        if (pProperties) {
            local_pProperties = new safe_VkDisplayModePropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
            }
        }
    }

    VkResult result = my_map_data->instance_dispatch_table->GetDisplayModePropertiesKHR(
        physicalDevice, display, pPropertyCount, (VkDisplayModePropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            // Wrap the returned mode handle and copy the remaining fields back
            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].displayMode);
            pProperties[idx0].displayMode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
            pProperties[idx0].parameters.visibleRegion.width = local_pProperties[idx0].parameters.visibleRegion.width;
            pProperties[idx0].parameters.visibleRegion.height = local_pProperties[idx0].parameters.visibleRegion.height;
            pProperties[idx0].parameters.refreshRate = local_pProperties[idx0].parameters.refreshRate;
        }
    }
    if (local_pProperties) {
        delete[] local_pProperties;
    }
    return result;
}
675#endif
676
677} // namespace unique_objects
678
679// vk_layer_logging.h expects these to be defined
680VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
681 const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
682 const VkAllocationCallbacks *pAllocator,
683 VkDebugReportCallbackEXT *pMsgCallback) {
684 return unique_objects::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
685}
686
687VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
688 const VkAllocationCallbacks *pAllocator) {
689 unique_objects::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
690}
691
692VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
693 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
694 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
695 unique_objects::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
696}
697
698VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
699 VkExtensionProperties *pProperties) {
700 return unique_objects::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
701}
702
703VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
704 VkLayerProperties *pProperties) {
705 return unique_objects::EnumerateInstanceLayerProperties(pCount, pProperties);
706}
707
708VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
709 VkLayerProperties *pProperties) {
710 assert(physicalDevice == VK_NULL_HANDLE);
711 return unique_objects::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
712}
713
714VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
715 return unique_objects::GetDeviceProcAddr(dev, funcName);
716}
717
718VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
719 return unique_objects::GetInstanceProcAddr(instance, funcName);
720}
721
722VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
723 const char *pLayerName, uint32_t *pCount,
724 VkExtensionProperties *pProperties) {
725 assert(physicalDevice == VK_NULL_HANDLE);
726 return unique_objects::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
727}