/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unordered_map>
#include <vector>
#include <list>
#include <memory>

// For Windows, this #include must come before other Vk headers.
#include "vk_loader_platform.h"

#include "unique_objects.h"
#include "vk_dispatch_table_helper.h"
#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_extension_utils.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vk_layer_utils.h"
#include "vk_struct_string_helper_cpp.h"
#include "vk_validation_error_messages.h"
#include "vulkan/vk_layer.h"

// This intentionally includes a cpp file
#include "vk_safe_struct.cpp"

#include "unique_objects_wrappers.h"
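
// How handle wrapping works in this layer: when a create/allocate call succeeds, the real driver handle is stored
// in unique_id_mapping under a fresh 64-bit id taken from global_unique_id, and that id is returned to the
// application in place of the driver handle. Each entry point below translates any wrapped handles in its inputs
// back to driver handles (while holding global_lock) before dispatching down the chain, and wraps the handles it
// returns the same way.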

namespace unique_objects {

static void initUniqueObjects(layer_data *instance_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(instance_data->report_data, instance_data->logging_callback, pAllocator, "google_unique_objects");
}

// Handle CreateInstance Extensions
static void checkInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    uint32_t i;
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    instance_ext_map[disp_table] = {};

    for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].wsi_enabled = true;
        }
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].display_enabled = true;
        }
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].xlib_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].xcb_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].wayland_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].mir_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].android_enabled = true;
        }
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
            instance_ext_map[disp_table].win32_enabled = true;
        }
#endif

        // Check for recognized instance extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedInstanceExtensions)) {
            log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    __LINE__, VALIDATION_ERROR_UNDEFINED, "UniqueObjects",
                    "Instance Extension %s is not supported by this layer. Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

// Handle CreateDevice Extensions
static void createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkLayerDispatchTable *disp_table = device_data->device_dispatch_table;
    PFN_vkGetDeviceProcAddr gpa = disp_table->GetDeviceProcAddr;

    disp_table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
    disp_table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
    disp_table->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
    disp_table->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
    disp_table->QueuePresentKHR = (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
    device_data->wsi_enabled = false;

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
            device_data->wsi_enabled = true;
        }
        // Check for recognized device extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kUniqueObjectsSupportedDeviceExtensions)) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
                    __LINE__, VALIDATION_ERROR_UNDEFINED, "UniqueObjects",
                    "Device Extension %s is not supported by this layer. Using this extension may adversely affect "
                    "validation results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
    instance_data->instance = *pInstance;
    instance_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
    layer_init_instance_dispatch_table(*pInstance, instance_data->instance_dispatch_table, fpGetInstanceProcAddr);

    instance_data->report_data =
        debug_report_create_instance(instance_data->instance_dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount,
                                     pCreateInfo->ppEnabledExtensionNames);

    // Set up temporary debug callbacks to output messages at CreateInstance-time
    if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
                                  &instance_data->tmp_callbacks)) {
        if (instance_data->num_tmp_callbacks > 0) {
            if (layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                           instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
                layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
                instance_data->num_tmp_callbacks = 0;
            }
        }
    }

    initUniqueObjects(instance_data, pAllocator);
    checkInstanceRegisterExtensions(pCreateInfo, *pInstance);

    // Disable and free tmp callbacks, no longer necessary
    if (instance_data->num_tmp_callbacks > 0) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    instance_ext_map.erase(disp_table);
    disp_table->DestroyInstance(instance, pAllocator);

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    layer_data_map.erase(key);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
    my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);

    // Setup layer's device dispatch table
    my_device_data->device_dispatch_table = new VkLayerDispatchTable;
    layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);

    createDeviceRegisterExtensions(pCreateInfo, *pDevice);

    // Set gpu for this device in order to get at any objects mapped at instance level
    my_device_data->gpu = gpu;

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    layer_data *dev_data = get_my_data_ptr(key, layer_data_map);

    layer_debug_report_destroy_device(device);
    dev_data->device_dispatch_table->DestroyDevice(device, pAllocator);
    layer_data_map.erase(key);
}

static const VkLayerProperties globalLayerProps = {"VK_LAYER_GOOGLE_unique_objects",
                                                   VK_LAYER_API_VERSION,  // specVersion
                                                   1,                     // implementationVersion
                                                   "Google Validation Layer"};
static inline PFN_vkVoidFunction layer_intercept_proc(const char *name) {
    for (unsigned int i = 0; i < sizeof(procmap) / sizeof(procmap[0]); i++) {
        if (!strcmp(name, procmap[i].name))
            return procmap[i].pFunc;
    }
    return NULL;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, NULL, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, nullptr, pCount, pProperties);

    assert(physicalDevice);

    dispatch_key key = get_dispatch_key(physicalDevice);
    layer_data *instance_data = get_my_data_ptr(key, layer_data_map);
    return instance_data->instance_dispatch_table->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    PFN_vkVoidFunction addr;
    assert(device);
    addr = layer_intercept_proc(funcName);
    if (addr) {
        return addr;
    }

    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkLayerDispatchTable *disp_table = dev_data->device_dispatch_table;
    if (disp_table->GetDeviceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    PFN_vkVoidFunction addr;

    addr = layer_intercept_proc(funcName);
    if (addr) {
        return addr;
    }
    assert(instance);

    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    addr = debug_report_get_instance_proc_addr(instance_data->report_data, funcName);
    if (addr) {
        return addr;
    }

    VkLayerInstanceDispatchTable *disp_table = instance_data->instance_dispatch_table;
    if (disp_table->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetInstanceProcAddr(instance, funcName);
}

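// AllocateMemory needs special handling: if the pNext chain carries a VkDedicatedAllocationMemoryAllocateInfoNV,
// the buffer/image handles inside it are wrapped ids that must be swapped for driver handles before the chain is
// passed down. The chain is deep-copied into safe_* structs so the application's input is never modified.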
VKAPI_ATTR VkResult VKAPI_CALL AllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
                                              const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory) {
    const VkMemoryAllocateInfo *input_allocate_info = pAllocateInfo;
    std::unique_ptr<safe_VkMemoryAllocateInfo> safe_allocate_info;
    std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV> safe_dedicated_allocate_info;
    layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);

    if ((pAllocateInfo != nullptr) &&
        ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
        // Assuming there is only one extension struct of this type in the list for now
        safe_dedicated_allocate_info =
            std::unique_ptr<safe_VkDedicatedAllocationMemoryAllocateInfoNV>(new safe_VkDedicatedAllocationMemoryAllocateInfoNV);
        safe_allocate_info = std::unique_ptr<safe_VkMemoryAllocateInfo>(new safe_VkMemoryAllocateInfo(pAllocateInfo));
        input_allocate_info = reinterpret_cast<const VkMemoryAllocateInfo *>(safe_allocate_info.get());

        const GenericHeader *orig_pnext = reinterpret_cast<const GenericHeader *>(pAllocateInfo->pNext);
        GenericHeader *input_pnext = reinterpret_cast<GenericHeader *>(safe_allocate_info.get());
        while (orig_pnext != nullptr) {
            if (orig_pnext->sType == VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV) {
                safe_dedicated_allocate_info->initialize(
                    reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>(orig_pnext));

                std::unique_lock<std::mutex> lock(global_lock);

                if (safe_dedicated_allocate_info->buffer != VK_NULL_HANDLE) {
                    uint64_t local_buffer = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->buffer);
                    safe_dedicated_allocate_info->buffer =
                        reinterpret_cast<VkBuffer &>(device_data->unique_id_mapping[local_buffer]);
                }

                if (safe_dedicated_allocate_info->image != VK_NULL_HANDLE) {
                    uint64_t local_image = reinterpret_cast<uint64_t &>(safe_dedicated_allocate_info->image);
                    safe_dedicated_allocate_info->image = reinterpret_cast<VkImage &>(device_data->unique_id_mapping[local_image]);
                }

                lock.unlock();

                input_pnext->pNext = reinterpret_cast<GenericHeader *>(safe_dedicated_allocate_info.get());
                input_pnext = reinterpret_cast<GenericHeader *>(input_pnext->pNext);
            } else {
                // TODO: generic handling of pNext copies
            }

            orig_pnext = reinterpret_cast<const GenericHeader *>(orig_pnext->pNext);
        }
    }

    VkResult result = device_data->device_dispatch_table->AllocateMemory(device, input_allocate_info, pAllocator, pMemory);

    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pMemory);
        *pMemory = reinterpret_cast<VkDeviceMemory &>(unique_id);
    }

    return result;
}

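// For pipeline creation the create-info arrays are deep-copied into safe_* structs so that wrapped handles
// (pipelineCache, basePipelineHandle, layout, shader modules, and renderPass for graphics pipelines) can be
// replaced with driver handles without touching the caller's structures. Each successfully created pipeline is
// then wrapped with a fresh unique id.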
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                      const VkComputePipelineCreateInfo *pCreateInfos,
                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].stage.module) {
                local_pCreateInfos[idx0].stage.module =
                    (VkShaderModule)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].stage.module)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateComputePipelines(
        device, pipelineCache, createInfoCount, (const VkComputePipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
                pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle =
                    (VkPipeline)my_device_data
                        ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].basePipelineHandle)];
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout =
                    (VkPipelineLayout)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].layout)];
            }
            if (pCreateInfos[idx0].pStages) {
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        local_pCreateInfos[idx0].pStages[idx1].module =
                            (VkShaderModule)my_device_data
                                ->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].pStages[idx1].module)];
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                local_pCreateInfos[idx0].renderPass =
                    (VkRenderPass)
                        my_device_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfos[idx0].renderPass)];
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = (VkPipelineCache)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(pipelineCache)];
    }

    VkResult result = my_device_data->device_dispatch_table->CreateGraphicsPipelines(
        device, pipelineCache, createInfoCount, (const VkGraphicsPipelineCreateInfo *)local_pCreateInfos, pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        uint64_t unique_id = 0;
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pPipelines[i]);
                pPipelines[i] = reinterpret_cast<VkPipeline &>(unique_id);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDebugReportCallbackEXT(VkInstance instance,
                                                            const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                            const VkAllocationCallbacks *pAllocator,
                                                            VkDebugReportCallbackEXT *pMsgCallback) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    VkResult result =
        instance_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);

    if (VK_SUCCESS == result) {
        result = layer_create_msg_callback(instance_data->report_data, false, pCreateInfo, pAllocator, pMsgCallback);
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback,
                                                         const VkAllocationCallbacks *pAllocator) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    instance_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, callback, pAllocator);
    layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                 VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                 int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
    instance_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode,
                                                                  pLayerPrefix, pMsg);
}

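// Swapchain creation pulls its VkSurfaceKHR mapping from the instance-level map (reached through the device's gpu
// member) while oldSwapchain lives in the device-level map; GetSwapchainImagesKHR wraps each image the driver
// returns so later calls that reference those images resolve correctly.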
VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
    if (pCreateInfo) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
        local_pCreateInfo->oldSwapchain =
            (VkSwapchainKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->oldSwapchain)];
        // Need to pull surface mapping from the instance-level map
        layer_data *instance_data = get_my_data_ptr(get_dispatch_key(my_map_data->gpu), layer_data_map);
        local_pCreateInfo->surface =
            (VkSurfaceKHR)instance_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pCreateInfo->surface)];
    }

    VkResult result = my_map_data->device_dispatch_table->CreateSwapchainKHR(
        device, (const VkSwapchainCreateInfoKHR *)local_pCreateInfo, pAllocator, pSwapchain);
    if (local_pCreateInfo) {
        delete local_pCreateInfo;
    }
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        uint64_t unique_id = global_unique_id++;
        my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(*pSwapchain);
        *pSwapchain = reinterpret_cast<VkSwapchainKHR &>(unique_id);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    if (VK_NULL_HANDLE != swapchain) {
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = (VkSwapchainKHR)my_device_data->unique_id_mapping[reinterpret_cast<uint64_t &>(swapchain)];
    }
    VkResult result =
        my_device_data->device_dispatch_table->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    // TODO: Need to add corresponding code to delete these images
    if (VK_SUCCESS == result) {
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            uint64_t unique_id = 0;
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
                unique_id = global_unique_id++;
                my_device_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(pSwapchainImages[i]);
                pSwapchainImages[i] = reinterpret_cast<VkImage &>(unique_id);
            }
        }
    }
    return result;
}

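// The VK_KHR_display entry points below are not built for Android. They operate on the layer_data looked up
// through the physical device's dispatch key (the instance-level map), which is where VkDisplayKHR and
// VkDisplayModeKHR handles are tracked.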
#ifndef __ANDROID__
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                                     VkDisplayPropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayPropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pProperties) {
            local_pProperties = new safe_VkDisplayPropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
                if (pProperties[idx0].display) {
                    local_pProperties[idx0].display =
                        (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<const uint64_t &>(pProperties[idx0].display)];
                }
            }
        }
    }

    VkResult result = my_map_data->instance_dispatch_table->GetPhysicalDeviceDisplayPropertiesKHR(
        physicalDevice, pPropertyCount, (VkDisplayPropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].display);
            pProperties[idx0].display = reinterpret_cast<VkDisplayKHR &>(unique_id);
            pProperties[idx0].displayName = local_pProperties[idx0].displayName;
            pProperties[idx0].physicalDimensions = local_pProperties[idx0].physicalDimensions;
            pProperties[idx0].physicalResolution = local_pProperties[idx0].physicalResolution;
            pProperties[idx0].supportedTransforms = local_pProperties[idx0].supportedTransforms;
            pProperties[idx0].planeReorderPossible = local_pProperties[idx0].planeReorderPossible;
            pProperties[idx0].persistentContent = local_pProperties[idx0].persistentContent;
        }
    }
    if (local_pProperties) {
        delete[] local_pProperties;
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
                                                                   uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    VkResult result = my_map_data->instance_dispatch_table->GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex,
                                                                                                pDisplayCount, pDisplays);
    if (VK_SUCCESS == result) {
        if ((*pDisplayCount > 0) && pDisplays) {
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pDisplayCount; i++) {
                auto it = my_map_data->unique_id_mapping.find(reinterpret_cast<const uint64_t &>(pDisplays[i]));
                assert(it != my_map_data->unique_id_mapping.end());
                pDisplays[i] = reinterpret_cast<VkDisplayKHR &>(it->second);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
                                                           uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties) {
    layer_data *my_map_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    safe_VkDisplayModePropertiesKHR *local_pProperties = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        display = (VkDisplayKHR)my_map_data->unique_id_mapping[reinterpret_cast<uint64_t &>(display)];
        if (pProperties) {
            local_pProperties = new safe_VkDisplayModePropertiesKHR[*pPropertyCount];
            for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
                local_pProperties[idx0].initialize(&pProperties[idx0]);
            }
        }
    }

    VkResult result = my_map_data->instance_dispatch_table->GetDisplayModePropertiesKHR(
        physicalDevice, display, pPropertyCount, (VkDisplayModePropertiesKHR *)local_pProperties);
    if (result == VK_SUCCESS && pProperties) {
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            std::lock_guard<std::mutex> lock(global_lock);

            uint64_t unique_id = global_unique_id++;
            my_map_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(local_pProperties[idx0].displayMode);
            pProperties[idx0].displayMode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
            pProperties[idx0].parameters.visibleRegion.width = local_pProperties[idx0].parameters.visibleRegion.width;
            pProperties[idx0].parameters.visibleRegion.height = local_pProperties[idx0].parameters.visibleRegion.height;
            pProperties[idx0].parameters.refreshRate = local_pProperties[idx0].parameters.refreshRate;
        }
    }
    if (local_pProperties) {
        delete[] local_pProperties;
    }
    return result;
}

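// GetDisplayPlaneCapabilitiesKHR tolerates a display mode handle that has no entry in the map yet: instead of
// asserting, it registers the incoming handle under a fresh unique id before dispatching.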
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
                                                              uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
    layer_data *dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        auto it = dev_data->unique_id_mapping.find(reinterpret_cast<uint64_t &>(mode));
        if (it == dev_data->unique_id_mapping.end()) {
            uint64_t unique_id = global_unique_id++;
            dev_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t &>(mode);

            mode = reinterpret_cast<VkDisplayModeKHR &>(unique_id);
        } else {
            mode = reinterpret_cast<VkDisplayModeKHR &>(it->second);
        }
    }
    VkResult result =
        dev_data->instance_dispatch_table->GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
    return result;
}
#endif

}  // namespace unique_objects

// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
                                                              const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
                                                              const VkAllocationCallbacks *pAllocator,
                                                              VkDebugReportCallbackEXT *pMsgCallback) {
    return unique_objects::CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback,
                                                           const VkAllocationCallbacks *pAllocator) {
    unique_objects::DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location,
                                                   int32_t msgCode, const char *pLayerPrefix, const char *pMsg) {
    unique_objects::DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return unique_objects::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return unique_objects::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return unique_objects::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return unique_objects::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}