/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unordered_map>
#include <vector>
#include <list>
#include <memory>

#include "vk_loader_platform.h"
#include "vulkan/vk_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_extension_utils.h"
#include "vk_layer_utils.h"
#include "vk_layer_table.h"
#include "vk_layer_logging.h"
#include "unique_objects.h"
#include "vk_dispatch_table_helper.h"
#include "vk_struct_string_helper_cpp.h"
#include "vk_layer_data.h"

// This intentionally includes a cpp file
#include "vk_safe_struct.cpp"

#include "unique_objects_wrappers.h"

namespace unique_objects {

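// Added note: this layer wraps every non-dispatchable Vulkan handle it sees in a layer-private
// 64-bit unique ID. The ID-to-handle translation lives in the per-instance/per-device
// unique_id_mapping table, guarded by global_lock; calls going down the chain swap the IDs back
// to the driver's native handles, and newly created handles are returned to the application as
// fresh IDs so that distinct objects never alias. A typical wrap on a create path looks like the
// following sketch (VkHandle/pHandle are placeholders, not names used in this file):
//     uint64_t unique_id = global_unique_id++;
//     dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pHandle);
//     *pHandle = reinterpret_cast<VkHandle &>(unique_id);
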
static void initUniqueObjects(layer_data *instance_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(instance_data->report_data, instance_data->logging_callback, pAllocator, "google_unique_objects");
}

// Handle CreateInstance Extensions
static void checkInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    uint32_t i;
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    instance_ext_map[disp_table] = {};

    for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].wsi_enabled = true;
        }
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].display_enabled = true;
        }
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].xlib_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].xcb_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].wayland_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].mir_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].android_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].win32_enabled = true;
        }
#endif

        // Check for recognized instance extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedInstanceExtensions)) {
            log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    0, "UniqueObjects",
                    "Instance Extension %s is not supported by this layer. Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

// Handle CreateDevice Extensions
static void createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkLayerDispatchTable *disp_table = device_data->device_dispatch_table;
    PFN_vkGetDeviceProcAddr gpa = disp_table->GetDeviceProcAddr;

    disp_table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
    disp_table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
    disp_table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
    disp_table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
    disp_table->QueuePresentKHR = (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
    device_data->wsi_enabled = false;

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
            device_data->wsi_enabled = true;
        }
        // Check for recognized device extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedDeviceExtensions)) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    0, "UniqueObjects",
                    "Device Extension %s is not supported by this layer. Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

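// Added note: CreateInstance and CreateDevice below follow the standard layer bootstrap pattern:
// pull the next link out of the VkLayerInstanceCreateInfo / VkLayerDeviceCreateInfo chain info,
// resolve the next layer's entry point through its GetInstanceProcAddr, advance the chain, and
// only then build this layer's dispatch table and per-instance / per-device state.
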
VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
    instance_data->instance = *pInstance;
    instance_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
    layer_init_instance_dispatch_table(*pInstance, instance_data->instance_dispatch_table, fpGetInstanceProcAddr);

    instance_data->report_data =
        debug_report_create_instance(instance_data->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
                                     pCreateInfo->ppEnabledExtensionNames);

    // Set up temporary debug callbacks to output messages at CreateInstance-time
    if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
                                  &instance_data->tmp_callbacks)) {
        if (instance_data->num_tmp_callbacks > 0) {
            if (layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                           instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
                layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
                instance_data->num_tmp_callbacks = 0;
            }
        }
    }

    initUniqueObjects(instance_data, pAllocator);
    checkInstanceRegisterExtensions(pCreateInfo, *pInstance);

    // Disable and free tmp callbacks, no longer necessary
    if (instance_data->num_tmp_callbacks > 0) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    instance_ext_map.erase(disp_table);
    disp_table->DestroyInstance(instance, pAllocator);

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
    my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);

    // Setup layer's device dispatch table
    my_device_data->device_dispatch_table = new VkLayerDispatchTable;
    layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);

    createDeviceRegisterExtensions(pCreateInfo, *pDevice);

    // Set gpu for this device in order to get at any objects mapped at instance level
    my_device_data->gpu = gpu;

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    layer_data *dev_data = get_my_data_ptr(key, layer_data_map);

    layer_debug_report_destroy_device(device);
    dev_data->device_dispatch_table->DestroyDevice(device, pAllocator);
    layer_data_map.erase(key);
}

static const VkLayerProperties globalLayerProps = {"VK_LAYER_GOOGLE_unique_objects",
                                                   VK_LAYER_API_VERSION, // specVersion
                                                   1,                    // implementationVersion
                                                   "Google Validation Layer"};

static inline PFN_vkVoidFunction layer_intercept_proc(const char *name) {
    for (unsigned int i = 0; i < sizeof(procmap) / sizeof(procmap[0]); i++) {
        if (!strcmp(name, procmap[i].name))
            return procmap[i].pFunc;
    }
    return NULL;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, NULL, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, nullptr, pCount, pProperties);

    assert(physicalDevice);

    dispatch_key key = get_dispatch_key(physicalDevice);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
    return instance_data->instance_dispatch_table->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    PFN_vkVoidFunction addr;
    assert(device);
    addr = layer_intercept_proc(funcName);
    if (addr) {
        return addr;
    }

    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkLayerDispatchTable *disp_table = dev_data->device_dispatch_table;
    if (disp_table->GetDeviceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;

    addr = layer_intercept_proc(funcName);
    if (addr) {
        return addr;
    }
    assert(instance);

    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    addr = debug_report_get_instance_proc_addr(instance_data->report_data, funcName);
    if (addr) {
        return addr;
    }

    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    if (disp_table->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetInstanceProcAddr(instance, funcName);
}

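// Added note: AllocateMemory has to unwrap handles that are buried inside a chained extension
// struct. When the application chains a VkDedicatedAllocationMemoryAllocateInfoNV onto the
// allocate info, the buffer/image handles inside it are this layer's unique IDs, so the structs
// are deep-copied into their safe_* variants and the embedded handles translated back to the
// driver's handles under global_lock before the call goes down the chain. The returned
// VkDeviceMemory is then wrapped in a new unique ID before it is handed back to the application.
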
VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    const VkMemoryAllocateInfo *input_allocate_info = pAllocateInfo;
    std::unique_ptr<safe_VkMemoryAllocateInfo> safe_allocate_info;
    std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV> safe_dedicated_allocate_info;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if ((pAllocateInfo != nullptr) &&
        ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
        // Assuming there is only one extension struct of this type in the list for now
        safe_dedicated_allocate_info =
            std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV>(new safe_VkDedicatedAllocationMemoryAllocateInfoNV);
        safe_allocate_info = std::unique_ptr<safe_VkMemoryAllocateInfo>(new safe_VkMemoryAllocateInfo(pAllocateInfo));
        input_allocate_info = reinterpret_cast<const VkMemoryAllocateInfo *>(safe_allocate_info.get());

        const GenericHeader *orig_pnext = reinterpret_cast<const GenericHeader *>(pAllocateInfo->pNext);
        GenericHeader *input_pnext = reinterpret_cast<GenericHeader *>(safe_allocate_info.get());
        while (orig_pnext != nullptr) {
            if (orig_pnext->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV) {
                safe_dedicated_allocate_info->initialize(
                    reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(orig_pnext));

                std::unique_lock<std::mutex> lock(global_lock);

                if (safe_dedicated_allocate_info->buffer != VK_NULL_HANDLE) {
                    uint64_t local_buffer = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->buffer);
                    safe_dedicated_allocate_info->buffer =
                        reinterpret_cast<VkBuffer &>(device_data->unique_id_mapping[local_buffer]);
                }

                if (safe_dedicated_allocate_info->image != VK_NULL_HANDLE) {
                    uint64_t local_image = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->image);
                    safe_dedicated_allocate_info->image = reinterpret_cast<VkImage &>(device_data->unique_id_mapping[local_image]);
                }

                lock.unlock();

                input_pnext->pNext = reinterpret_cast<GenericHeader *>(safe_dedicated_allocate_info.get());
                input_pnext = reinterpret_cast<GenericHeader *>(input_pnext->pNext);
            } else {
                // TODO: generic handling of pNext copies
            }

            orig_pnext = reinterpret_cast<const GenericHeader *>(orig_pnext->pNext);
        }
    }

    VkResult result = device_data->device_dispatch_table->AllocateMemory(device, input_allocate_info, pAllocator, pMemory);

    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pMemory);
        *pMemory = reinterpret_cast<VkDeviceMemory &>(unique_id);
    }

    return result;
}

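// Added note: the pipeline-creation wrappers below follow the same pattern. Each
// VkComputePipelineCreateInfo / VkGraphicsPipelineCreateInfo is deep-copied into its safe_* form,
// every wrapped handle it references (basePipelineHandle, layout, shader modules, renderPass) is
// translated to the driver's handle under global_lock, the call is passed down the chain, and any
// successfully created pipelines are wrapped in fresh unique IDs on return.
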
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                      const VkComputePipelineCreateInfo *pCreateInfos,
                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].stage.module) {
                local_pCreateInfos[idx0].stage.module =
                    (VkShaderModule)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].stage.module)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateComputePipelines(
        device, pipelineCache, createInfoCount, (const VkComputePipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
                pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].pStages) {
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        local_pCreateInfos[idx0].pStages[idx1].module =
                            (VkShaderModule)my_device_data
                                ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].pStages[idx1].module)];
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                local_pCreateInfos[idx0].renderPass =
                    (VkRenderPass)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].renderPass)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateGraphicsPipelines(
        device, pipelineCache, createInfoCount, (const VkGraphicsPipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
                pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
                                                            const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator,
                                                            VkDebugReportCallbackEXT *pMsgCallback) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result =
        instance_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);

    if (VK_SUCCESS == result) {
        result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pMsgCallback);
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
                                                         const VkAllocationCallbacks *pAllocator) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    instance_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
    layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    instance_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix,
                                                                  pMsg);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
    if (pCreateInfo) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
        local_pCreateInfo->oldSwapchain =
            (VkSwapchainKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->oldSwapchain)];
        // Need to pull surface mapping from the instance-level map
        layer_data *instance_data = get_my_data_ptr(get_dispatch_key(my_map_data->gpu), layer_data_map);
        local_pCreateInfo->surface =
            (VkSurfaceKHR)instance_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->surface)];
    }

    VkResult result = my_map_data->device_dispatch_table->CreateSwapchainKHR(
        device, (const VkSwapchainCreateInfoKHR *)local_pCreateInfo, pAllocator, pSwapchain);
    if (local_pCreateInfo) {
        delete local_pCreateInfo;
    }
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSwapchain);
        *pSwapchain = reinterpret_cast<VkSwapchainKHR &>(unique_id);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    if (VK_NULL_HANDLE != swapchain) {
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = (VkSwapchainKHR)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];
    }
    VkResult result =
        my_device_data->device_dispatch_table->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    // TODO: Need to add corresponding code to delete these images
    if (VK_SUCCESS == result) {
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            uint64_t unique_id = 0;
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pSwapchainImages[i]);
                pSwapchainImages[i] = reinterpret_cast<VkImage &>(unique_id);
            }
        }
    }
    return result;
}

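// Added note: the VK_KHR_display entry points below are compiled out on Android. Display and
// display-mode handles are created by the ICD rather than by the application, so these wrappers
// wrap them on the way back up: properties are queried through deep-copied safe_* structs, the
// ICD's handles are recorded in unique_id_mapping, and the application-visible structs are
// rewritten to carry the layer's unique IDs.
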
#ifndef __ANDROID__
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                                     VkDisplayPropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayPropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pProperties) {
            local_pProperties = new safe_VkDisplayPropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
                if (pProperties[idx0].display) {
                    local_pProperties[idx0].display =
                        (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pProperties[idx0].display)];
                }
            }
        }
    }

    VkResult result = my_map_data->instance_dispatch_table->GetPhysicalDeviceDisplayPropertiesKHR(
        physicalDevice, pPropertyCount, (VkDisplayPropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].display);
            pProperties[idx0].display = reinterpret_cast<VkDisplayKHR &>(unique_id);
            pProperties[idx0].displayName = local_pProperties[idx0].displayName;
            pProperties[idx0].physicalDimensions = local_pProperties[idx0].physicalDimensions;
            pProperties[idx0].physicalResolution = local_pProperties[idx0].physicalResolution;
            pProperties[idx0].supportedTransforms = local_pProperties[idx0].supportedTransforms;
            pProperties[idx0].planeReorderPossible = local_pProperties[idx0].planeReorderPossible;
            pProperties[idx0].persistentContent = local_pProperties[idx0].persistentContent;
        }
    }
    if (local_pProperties) {
        delete[] local_pProperties;
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
                                                                   uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkResult result = my_map_data->instance_dispatch_table->GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
                                                                                                pDisplayCount, pDisplays);
    if (VK_SUCCESS == result) {
        if ((*pDisplayCount > 0) && pDisplays) {
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pDisplayCount; i++) {
                auto it = my_map_data->unique_id_mapping.find(reinterpret_cast<const uint64_t &>(pDisplays[i]));
                assert(it != my_map_data->unique_id_mapping.end());
                pDisplays[i] = reinterpret_cast<VkDisplayKHR &>(it->second);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
                                                           uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayModePropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        display = (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<uint64_t &>(display)];
        if (pProperties) {
            local_pProperties = new safe_VkDisplayModePropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
            }
        }
    }

    VkResult result = my_map_data->instance_dispatch_table->GetDisplayModePropertiesKHR(
        physicalDevice, display, pPropertyCount, (VkDisplayModePropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].displayMode);
            pProperties[idx0].displayMode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
            pProperties[idx0].parameters.visibleRegion.width = local_pProperties[idx0].parameters.visibleRegion.width;
            pProperties[idx0].parameters.visibleRegion.height = local_pProperties[idx0].parameters.visibleRegion.height;
            pProperties[idx0].parameters.refreshRate = local_pProperties[idx0].parameters.refreshRate;
        }
    }
    if (local_pProperties) {
        delete[] local_pProperties;
    }
    return result;
}

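// Added note: the incoming display mode handle in GetDisplayPlaneCapabilitiesKHR may or may not
// already be known to this layer. The block below checks unique_id_mapping and either translates
// a known unique ID to the underlying handle or registers the unrecognized handle under a fresh
// ID before the call is forwarded through the instance dispatch table.
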
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
                                                              uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        auto it = dev_data->unique_id_mapping.find(reinterpret_cast<uint64_t &>(mode));
        if (it == dev_data->unique_id_mapping.end()) {
            uint64_t unique_id = global_unique_id++;
            dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(mode);

            mode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
        } else {
            mode = reinterpret_cast<VkDisplayModeKHR &>(it->second);
        }
    }
    VkResult result =
        dev_data->instance_dispatch_table->GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
    return result;
}
#endif

} // namespace unique_objects

// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return unique_objects::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    unique_objects::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    unique_objects::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return unique_objects::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return unique_objects::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return unique_objects::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return unique_objects::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}