/*
 * Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (c) 2015-2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#define NOMINMAX

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unordered_map>
#include <vector>
#include <list>
#include <memory>
#include <algorithm>

// For Windows, this #include must come before other Vk headers.
#include "vk_loader_platform.h"

#include "unique_objects.h"
#include "vk_dispatch_table_helper.h"
#include "vk_layer_config.h"
#include "vk_layer_data.h"
#include "vk_layer_extension_utils.h"
#include "vk_layer_logging.h"
#include "vk_layer_table.h"
#include "vk_layer_utils.h"
#include "vk_enum_string_helper.h"
#include "vk_validation_error_messages.h"
#include "vk_object_types.h"
#include "vk_extension_helper.h"
#include "vulkan/vk_layer.h"

// This intentionally includes a cpp file
#include "vk_safe_struct.cpp"

#include "unique_objects_wrappers.h"

namespace unique_objects {

static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;

static void initUniqueObjects(instance_layer_data *instance_data, const VkAllocationCallbacks *pAllocator) {
    layer_debug_actions(instance_data->report_data, instance_data->logging_callback, pAllocator, "google_unique_objects");
}

// Check enabled instance extensions against supported instance extension whitelist
static void InstanceExtensionWhitelist(const VkInstanceCreateInfo *pCreateInfo, VkInstance instance) {
    instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        // Check for recognized instance extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kInstanceExtensionNames)) {
            log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    VALIDATION_ERROR_UNDEFINED, "UniqueObjects",
                    "Instance Extension %s is not supported by this layer. Using this extension may adversely affect validation "
                    "results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

// Check enabled device extensions against supported device extension whitelist
static void DeviceExtensionWhitelist(const VkDeviceCreateInfo *pCreateInfo, VkDevice device) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);

    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        // Check for recognized device extensions
        if (!white_list(pCreateInfo->ppEnabledExtensionNames[i], kDeviceExtensionNames)) {
            log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
                    VALIDATION_ERROR_UNDEFINED, "UniqueObjects",
                    "Device Extension %s is not supported by this layer. Using this extension may adversely affect validation "
                    "results and/or produce undefined behavior.",
                    pCreateInfo->ppEnabledExtensionNames[i]);
        }
    }
}

VKAPI_ATTR VkResult VKAPI_CALL CreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator,
                                              VkInstance *pInstance) {
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(*pInstance), instance_layer_data_map);
    instance_data->instance = *pInstance;
    layer_init_instance_dispatch_table(*pInstance, &instance_data->dispatch_table, fpGetInstanceProcAddr);

    instance_data->report_data = debug_report_create_instance(
        &instance_data->dispatch_table, *pInstance, pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);

    // Set up temporary debug callbacks to output messages at CreateInstance-time
    if (!layer_copy_tmp_callbacks(pCreateInfo->pNext, &instance_data->num_tmp_callbacks, &instance_data->tmp_dbg_create_infos,
                                  &instance_data->tmp_callbacks)) {
        if (instance_data->num_tmp_callbacks > 0) {
            if (layer_enable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks,
                                           instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks)) {
                layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
                instance_data->num_tmp_callbacks = 0;
            }
        }
    }

    initUniqueObjects(instance_data, pAllocator);
    InstanceExtensionWhitelist(pCreateInfo, *pInstance);

    // Disable and free tmp callbacks, no longer necessary
    if (instance_data->num_tmp_callbacks > 0) {
        layer_disable_tmp_callbacks(instance_data->report_data, instance_data->num_tmp_callbacks, instance_data->tmp_callbacks);
        layer_free_tmp_callbacks(instance_data->tmp_dbg_create_infos, instance_data->tmp_callbacks);
        instance_data->num_tmp_callbacks = 0;
    }

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(instance);
    instance_layer_data *instance_data = GetLayerDataPtr(key, instance_layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = &instance_data->dispatch_table;
    disp_table->DestroyInstance(instance, pAllocator);

    // Clean up logging callback, if any
    while (instance_data->logging_callback.size() > 0) {
        VkDebugReportCallbackEXT callback = instance_data->logging_callback.back();
        layer_destroy_msg_callback(instance_data->report_data, callback, pAllocator);
        instance_data->logging_callback.pop_back();
    }

    layer_debug_report_destroy_instance(instance_data->report_data);
    FreeLayerDataPtr(key, instance_layer_data_map);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    instance_layer_data *my_instance_data = GetLayerDataPtr(get_dispatch_key(gpu), instance_layer_data_map);
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice)fpGetInstanceProcAddr(my_instance_data->instance, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    layer_data *my_device_data = GetLayerDataPtr(get_dispatch_key(*pDevice), layer_data_map);
    my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);

    // Setup layer's device dispatch table
    layer_init_device_dispatch_table(*pDevice, &my_device_data->dispatch_table, fpGetDeviceProcAddr);

    DeviceExtensionWhitelist(pCreateInfo, *pDevice);

    // Save a pointer to the instance-level data so device-level calls can reach objects mapped at instance level
    my_device_data->instance_data = my_instance_data;

    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
    dispatch_key key = get_dispatch_key(device);
    layer_data *dev_data = GetLayerDataPtr(key, layer_data_map);

    layer_debug_report_destroy_device(device);
    dev_data->dispatch_table.DestroyDevice(device, pAllocator);

    FreeLayerDataPtr(key, layer_data_map);
}

static const VkLayerProperties globalLayerProps = {"VK_LAYER_GOOGLE_unique_objects",
                                                   VK_LAYER_API_VERSION,  // specVersion
                                                   1,                     // implementationVersion
                                                   "Google Validation Layer"};

// Declare prototype for this function; it is defined later in this file
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName);

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                              VkLayerProperties *pProperties) {
    return util_GetLayerProperties(1, &globalLayerProps, pCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                    VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, NULL, pCount, pProperties);

    return VK_ERROR_LAYER_NOT_PRESENT;
}

VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
                                                                  uint32_t *pCount, VkExtensionProperties *pProperties) {
    if (pLayerName && !strcmp(pLayerName, globalLayerProps.layerName))
        return util_GetExtensionProperties(0, nullptr, pCount, pProperties);

    assert(physicalDevice);

    dispatch_key key = get_dispatch_key(physicalDevice);
    instance_layer_data *instance_data = GetLayerDataPtr(key, instance_layer_data_map);
    return instance_data->dispatch_table.EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount, pProperties);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
    const auto item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }

    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    const auto &table = device_data->dispatch_table;
    if (!table.GetDeviceProcAddr) return nullptr;
    return table.GetDeviceProcAddr(device, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
    const auto item = name_to_funcptr_map.find(funcName);
    if (item != name_to_funcptr_map.end()) {
        return reinterpret_cast<PFN_vkVoidFunction>(item->second);
    }

    instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
    const auto &table = instance_data->dispatch_table;
    if (!table.GetInstanceProcAddr) return nullptr;
    return table.GetInstanceProcAddr(instance, funcName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName) {
    instance_layer_data *instance_data = GetLayerDataPtr(get_dispatch_key(instance), instance_layer_data_map);
    VkLayerInstanceDispatchTable *disp_table = &instance_data->dispatch_table;
    if (disp_table->GetPhysicalDeviceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetPhysicalDeviceProcAddr(instance, funcName);
}

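// Non-dispatchable handles embedded in the pipeline create infos below belong to the application's (wrapped) handle space.
// Each create info is deep-copied into its safe_* equivalent with the handles unwrapped before calling down the chain, and
// the newly created pipeline handles are wrapped before being returned so the application only ever sees unique IDs.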
VKAPI_ATTR VkResult VKAPI_CALL CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                      const VkComputePipelineCreateInfo *pCreateInfos,
                                                      const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    safe_VkComputePipelineCreateInfo *local_pCreateInfos = NULL;
    if (pCreateInfos) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfos = new safe_VkComputePipelineCreateInfo[createInfoCount];
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0]);
            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle = Unwrap(pCreateInfos[idx0].basePipelineHandle);
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout = Unwrap(pCreateInfos[idx0].layout);
            }
            if (pCreateInfos[idx0].stage.module) {
                local_pCreateInfos[idx0].stage.module = Unwrap(pCreateInfos[idx0].stage.module);
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = Unwrap(pipelineCache);
    }

    VkResult result = device_data->dispatch_table.CreateComputePipelines(device, pipelineCache, createInfoCount,
                                                                         local_pCreateInfos->ptr(), pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                pPipelines[i] = WrapNew(pPipelines[i]);
            }
        }
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
                                                       const VkGraphicsPipelineCreateInfo *pCreateInfos,
                                                       const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr;
    if (pCreateInfos) {
        local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount];
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
            bool uses_color_attachment = false;
            bool uses_depthstencil_attachment = false;
            {
                const auto subpasses_uses_it = device_data->renderpasses_states.find(Unwrap(pCreateInfos[idx0].renderPass));
                if (subpasses_uses_it != device_data->renderpasses_states.end()) {
                    const auto &subpasses_uses = subpasses_uses_it->second;
                    if (subpasses_uses.subpasses_using_color_attachment.count(pCreateInfos[idx0].subpass))
                        uses_color_attachment = true;
                    if (subpasses_uses.subpasses_using_depthstencil_attachment.count(pCreateInfos[idx0].subpass))
                        uses_depthstencil_attachment = true;
                }
            }

            local_pCreateInfos[idx0].initialize(&pCreateInfos[idx0], uses_color_attachment, uses_depthstencil_attachment);

            if (pCreateInfos[idx0].basePipelineHandle) {
                local_pCreateInfos[idx0].basePipelineHandle = Unwrap(pCreateInfos[idx0].basePipelineHandle);
            }
            if (pCreateInfos[idx0].layout) {
                local_pCreateInfos[idx0].layout = Unwrap(pCreateInfos[idx0].layout);
            }
            if (pCreateInfos[idx0].pStages) {
                for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount; ++idx1) {
                    if (pCreateInfos[idx0].pStages[idx1].module) {
                        local_pCreateInfos[idx0].pStages[idx1].module = Unwrap(pCreateInfos[idx0].pStages[idx1].module);
                    }
                }
            }
            if (pCreateInfos[idx0].renderPass) {
                local_pCreateInfos[idx0].renderPass = Unwrap(pCreateInfos[idx0].renderPass);
            }
        }
    }
    if (pipelineCache) {
        std::lock_guard<std::mutex> lock(global_lock);
        pipelineCache = Unwrap(pipelineCache);
    }

    VkResult result = device_data->dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
                                                                          local_pCreateInfos->ptr(), pAllocator, pPipelines);
    delete[] local_pCreateInfos;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < createInfoCount; ++i) {
            if (pPipelines[i] != VK_NULL_HANDLE) {
                pPipelines[i] = WrapNew(pPipelines[i]);
            }
        }
    }
    return result;
}

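// Record which subpasses of a new render pass reference a color and/or depth-stencil attachment, keyed by the
// driver-level (unwrapped) render pass handle. CreateGraphicsPipelines consults this map when initializing its
// safe create-info copies.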
static void PostCallCreateRenderPass(layer_data *dev_data, const VkRenderPassCreateInfo *pCreateInfo, VkRenderPass renderPass) {
    auto &renderpass_state = dev_data->renderpasses_states[renderPass];

    for (uint32_t subpass = 0; subpass < pCreateInfo->subpassCount; ++subpass) {
        bool uses_color = false;
        for (uint32_t i = 0; i < pCreateInfo->pSubpasses[subpass].colorAttachmentCount && !uses_color; ++i)
            if (pCreateInfo->pSubpasses[subpass].pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) uses_color = true;

        bool uses_depthstencil = false;
        if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment)
            if (pCreateInfo->pSubpasses[subpass].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)
                uses_depthstencil = true;

        if (uses_color) renderpass_state.subpasses_using_color_attachment.insert(subpass);
        if (uses_depthstencil) renderpass_state.subpasses_using_depthstencil_attachment.insert(subpass);
    }
}

VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    VkResult result = dev_data->dispatch_table.CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);

        PostCallCreateRenderPass(dev_data, pCreateInfo, *pRenderPass);

        *pRenderPass = WrapNew(*pRenderPass);
    }
    return result;
}

static void PostCallDestroyRenderPass(layer_data *dev_data, VkRenderPass renderPass) {
    dev_data->renderpasses_states.erase(renderPass);
}

VKAPI_ATTR void VKAPI_CALL DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t renderPass_id = reinterpret_cast<uint64_t &>(renderPass);
    renderPass = (VkRenderPass)unique_id_mapping[renderPass_id];
    unique_id_mapping.erase(renderPass_id);
    lock.unlock();
    dev_data->dispatch_table.DestroyRenderPass(device, renderPass, pAllocator);

    lock.lock();
    PostCallDestroyRenderPass(dev_data, renderPass);
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo,
                                                  const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
    layer_data *my_map_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    safe_VkSwapchainCreateInfoKHR *local_pCreateInfo = NULL;
    if (pCreateInfo) {
        std::lock_guard<std::mutex> lock(global_lock);
        local_pCreateInfo = new safe_VkSwapchainCreateInfoKHR(pCreateInfo);
        local_pCreateInfo->oldSwapchain = Unwrap(pCreateInfo->oldSwapchain);
        // Surface is instance-level object
        local_pCreateInfo->surface = Unwrap(pCreateInfo->surface);
    }

    VkResult result = my_map_data->dispatch_table.CreateSwapchainKHR(device, local_pCreateInfo->ptr(), pAllocator, pSwapchain);
    if (local_pCreateInfo) {
        delete local_pCreateInfo;
    }
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        *pSwapchain = WrapNew(*pSwapchain);
    }
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
                                                         const VkSwapchainCreateInfoKHR *pCreateInfos,
                                                         const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    safe_VkSwapchainCreateInfoKHR *local_pCreateInfos = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfos) {
            local_pCreateInfos = new safe_VkSwapchainCreateInfoKHR[swapchainCount];
            for (uint32_t i = 0; i < swapchainCount; ++i) {
                local_pCreateInfos[i].initialize(&pCreateInfos[i]);
                if (pCreateInfos[i].surface) {
                    // Surface is instance-level object
                    local_pCreateInfos[i].surface = Unwrap(pCreateInfos[i].surface);
                }
                if (pCreateInfos[i].oldSwapchain) {
                    local_pCreateInfos[i].oldSwapchain = Unwrap(pCreateInfos[i].oldSwapchain);
                }
            }
        }
    }
    VkResult result = dev_data->dispatch_table.CreateSharedSwapchainsKHR(device, swapchainCount, local_pCreateInfos->ptr(),
                                                                         pAllocator, pSwapchains);
    if (local_pCreateInfos) delete[] local_pCreateInfos;
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t i = 0; i < swapchainCount; i++) {
            pSwapchains[i] = WrapNew(pSwapchains[i]);
        }
    }
    return result;
}

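// Swapchain images are owned by the swapchain rather than created by the application, so they are wrapped lazily here:
// wrapped handles are cached per (wrapped) swapchain, and repeat queries return the same wrapped handles instead of
// generating new unique IDs for the same images.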
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount,
                                                     VkImage *pSwapchainImages) {
    layer_data *my_device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    VkSwapchainKHR wrapped_swapchain_handle = swapchain;
    if (VK_NULL_HANDLE != swapchain) {
        std::lock_guard<std::mutex> lock(global_lock);
        swapchain = Unwrap(swapchain);
    }
    VkResult result =
        my_device_data->dispatch_table.GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    if ((VK_SUCCESS == result) || (VK_INCOMPLETE == result)) {
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            std::lock_guard<std::mutex> lock(global_lock);
            auto &wrapped_swapchain_image_handles = my_device_data->swapchain_wrapped_image_handle_map[wrapped_swapchain_handle];
            for (uint32_t i = static_cast<uint32_t>(wrapped_swapchain_image_handles.size()); i < *pSwapchainImageCount; i++) {
                wrapped_swapchain_image_handles.emplace_back(WrapNew(pSwapchainImages[i]));
            }
            for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
                pSwapchainImages[i] = wrapped_swapchain_image_handles[i];
            }
        }
    }
    return result;
}

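// Destroying a swapchain also retires its images, so drop the cached wrapped image handles along with the swapchain's
// own unique-ID mapping before calling down the chain.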
VKAPI_ATTR void VKAPI_CALL DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);

    auto &image_array = dev_data->swapchain_wrapped_image_handle_map[swapchain];
    for (auto &image_handle : image_array) {
        unique_id_mapping.erase(HandleToUint64(image_handle));
    }
    dev_data->swapchain_wrapped_image_handle_map.erase(swapchain);

    uint64_t swapchain_id = HandleToUint64(swapchain);
    swapchain = (VkSwapchainKHR)unique_id_mapping[swapchain_id];
    unique_id_mapping.erase(swapchain_id);
    lock.unlock();
    dev_data->dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL QueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(queue), layer_data_map);
    safe_VkPresentInfoKHR *local_pPresentInfo = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pPresentInfo) {
            local_pPresentInfo = new safe_VkPresentInfoKHR(pPresentInfo);
            if (local_pPresentInfo->pWaitSemaphores) {
                for (uint32_t index1 = 0; index1 < local_pPresentInfo->waitSemaphoreCount; ++index1) {
                    local_pPresentInfo->pWaitSemaphores[index1] = Unwrap(pPresentInfo->pWaitSemaphores[index1]);
                }
            }
            if (local_pPresentInfo->pSwapchains) {
                for (uint32_t index1 = 0; index1 < local_pPresentInfo->swapchainCount; ++index1) {
                    local_pPresentInfo->pSwapchains[index1] = Unwrap(pPresentInfo->pSwapchains[index1]);
                }
            }
        }
    }
    VkResult result = dev_data->dispatch_table.QueuePresentKHR(queue, local_pPresentInfo->ptr());

    // pResults is an output array embedded in a structure. The code generator neglects to copy back from the safe_* version,
    // so handle it as a special case here:
    if (pPresentInfo && pPresentInfo->pResults) {
        for (uint32_t i = 0; i < pPresentInfo->swapchainCount; i++) {
            pPresentInfo->pResults[i] = local_pPresentInfo->pResults[i];
        }
    }

    if (local_pPresentInfo) delete local_pPresentInfo;
    return result;
}

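// In addition to wrapping the new template handle, shadow the template create info in desc_template_map. The raw pData
// blob passed to later template-based updates can only be interpreted (and its embedded handles unwrapped) with this
// layout information; see BuildUnwrappedUpdateTemplateBuffer below.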
VKAPI_ATTR VkResult VKAPI_CALL CreateDescriptorUpdateTemplateKHR(VkDevice device,
                                                                 const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                                 const VkAllocationCallbacks *pAllocator,
                                                                 VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    safe_VkDescriptorUpdateTemplateCreateInfoKHR *local_create_info = NULL;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        if (pCreateInfo) {
            local_create_info = new safe_VkDescriptorUpdateTemplateCreateInfoKHR(pCreateInfo);
            if (pCreateInfo->descriptorSetLayout) {
                local_create_info->descriptorSetLayout = Unwrap(pCreateInfo->descriptorSetLayout);
            }
            if (pCreateInfo->pipelineLayout) {
                local_create_info->pipelineLayout = Unwrap(pCreateInfo->pipelineLayout);
            }
        }
    }
    VkResult result = dev_data->dispatch_table.CreateDescriptorUpdateTemplateKHR(device, local_create_info->ptr(), pAllocator,
                                                                                 pDescriptorUpdateTemplate);
    if (VK_SUCCESS == result) {
        std::lock_guard<std::mutex> lock(global_lock);
        *pDescriptorUpdateTemplate = WrapNew(*pDescriptorUpdateTemplate);

        // Shadow template createInfo for later updates
        std::unique_ptr<TEMPLATE_STATE> template_state(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, local_create_info));
        dev_data->desc_template_map[(uint64_t)*pDescriptorUpdateTemplate] = std::move(template_state);
    }
    return result;
}

VKAPI_ATTR void VKAPI_CALL DestroyDescriptorUpdateTemplateKHR(VkDevice device,
                                                              VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                              const VkAllocationCallbacks *pAllocator) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t descriptor_update_template_id = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
    dev_data->desc_template_map.erase(descriptor_update_template_id);
    descriptorUpdateTemplate = (VkDescriptorUpdateTemplateKHR)unique_id_mapping[descriptor_update_template_id];
    unique_id_mapping.erase(descriptor_update_template_id);
    lock.unlock();
    dev_data->dispatch_table.DestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
}

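// Walk the shadowed create info for the given (wrapped) template handle and build a copy of the caller's pData blob in
// which every embedded image/buffer/buffer-view descriptor has its handles unwrapped. The returned buffer is malloc'd
// here and freed by the caller after the downstream call completes.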
void *BuildUnwrappedUpdateTemplateBuffer(layer_data *dev_data, uint64_t descriptorUpdateTemplate, const void *pData) {
    auto const template_map_entry = dev_data->desc_template_map.find(descriptorUpdateTemplate);
    if (template_map_entry == dev_data->desc_template_map.end()) {
        assert(0);
    }
    auto const &create_info = template_map_entry->second->create_info;
    size_t allocation_size = 0;
    std::vector<std::tuple<size_t, VulkanObjectType, void *>> template_entries;

    for (uint32_t i = 0; i < create_info.descriptorUpdateEntryCount; i++) {
        for (uint32_t j = 0; j < create_info.pDescriptorUpdateEntries[i].descriptorCount; j++) {
            size_t offset = create_info.pDescriptorUpdateEntries[i].offset + j * create_info.pDescriptorUpdateEntries[i].stride;
            char *update_entry = (char *)(pData) + offset;

            switch (create_info.pDescriptorUpdateEntries[i].descriptorType) {
                case VK_DESCRIPTOR_TYPE_SAMPLER:
                case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
                case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
                case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
                case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
                    auto image_entry = reinterpret_cast<VkDescriptorImageInfo *>(update_entry);
                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorImageInfo));

                    VkDescriptorImageInfo *wrapped_entry = new VkDescriptorImageInfo(*image_entry);
                    wrapped_entry->sampler = Unwrap(image_entry->sampler);
                    wrapped_entry->imageView = Unwrap(image_entry->imageView);
                    template_entries.emplace_back(offset, kVulkanObjectTypeImage, reinterpret_cast<void *>(wrapped_entry));
                } break;

                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
                case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
                case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
                    auto buffer_entry = reinterpret_cast<VkDescriptorBufferInfo *>(update_entry);
                    allocation_size = std::max(allocation_size, offset + sizeof(VkDescriptorBufferInfo));

                    VkDescriptorBufferInfo *wrapped_entry = new VkDescriptorBufferInfo(*buffer_entry);
                    wrapped_entry->buffer = Unwrap(buffer_entry->buffer);
                    template_entries.emplace_back(offset, kVulkanObjectTypeBuffer, reinterpret_cast<void *>(wrapped_entry));
                } break;

                case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
                case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
                    auto buffer_view_handle = reinterpret_cast<VkBufferView *>(update_entry);
                    allocation_size = std::max(allocation_size, offset + sizeof(VkBufferView));

                    VkBufferView wrapped_entry = Unwrap(*buffer_view_handle);
                    template_entries.emplace_back(offset, kVulkanObjectTypeBufferView, reinterpret_cast<void *>(wrapped_entry));
                } break;
                default:
                    assert(0);
                    break;
            }
        }
    }
    // Allocate required buffer size and populate with source/unwrapped data
    void *unwrapped_data = malloc(allocation_size);
    for (auto &this_entry : template_entries) {
        VulkanObjectType type = std::get<1>(this_entry);
        void *destination = (char *)unwrapped_data + std::get<0>(this_entry);
        void *source = (char *)std::get<2>(this_entry);

        switch (type) {
            case kVulkanObjectTypeImage:
                *(reinterpret_cast<VkDescriptorImageInfo *>(destination)) = *(reinterpret_cast<VkDescriptorImageInfo *>(source));
                delete reinterpret_cast<VkDescriptorImageInfo *>(source);
                break;
            case kVulkanObjectTypeBuffer:
                *(reinterpret_cast<VkDescriptorBufferInfo *>(destination)) = *(reinterpret_cast<VkDescriptorBufferInfo *>(source));
                delete reinterpret_cast<VkDescriptorBufferInfo *>(source);
                break;
            case kVulkanObjectTypeBufferView:
                *(reinterpret_cast<VkBufferView *>(destination)) = reinterpret_cast<VkBufferView>(source);
                break;
            default:
                assert(0);
                break;
        }
    }
    return (void *)unwrapped_data;
}

VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet,
                                                              VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                              const void *pData) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
    void *unwrapped_buffer = nullptr;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        descriptorSet = Unwrap(descriptorSet);
        descriptorUpdateTemplate = (VkDescriptorUpdateTemplateKHR)unique_id_mapping[template_handle];
        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(dev_data, template_handle, pData);
    }
    dev_data->dispatch_table.UpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, unwrapped_buffer);
    free(unwrapped_buffer);
}

VKAPI_ATTR void VKAPI_CALL CmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,
                                                               VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                                               VkPipelineLayout layout, uint32_t set, const void *pData) {
    layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(commandBuffer), layer_data_map);
    uint64_t template_handle = reinterpret_cast<uint64_t &>(descriptorUpdateTemplate);
    void *unwrapped_buffer = nullptr;
    {
        std::lock_guard<std::mutex> lock(global_lock);
        descriptorUpdateTemplate = Unwrap(descriptorUpdateTemplate);
        layout = Unwrap(layout);
        unwrapped_buffer = BuildUnwrappedUpdateTemplateBuffer(dev_data, template_handle, pData);
    }
    dev_data->dispatch_table.CmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set,
                                                                 unwrapped_buffer);
    free(unwrapped_buffer);
}

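// Display and display-mode handles returned by the instance-level queries below are non-dispatchable and are wrapped
// like every other handle, so the application sees unique IDs rather than driver handles.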
#ifndef __ANDROID__
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
                                                                     VkDisplayPropertiesKHR *pProperties) {
    instance_layer_data *my_map_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);

    VkResult result =
        my_map_data->dispatch_table.GetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties);
    if ((result == VK_SUCCESS || result == VK_INCOMPLETE) && pProperties) {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            pProperties[idx0].display = WrapNew(pProperties[idx0].display);
        }
    }
    return result;
}

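// Rather than creating new unique IDs, this call translates each returned display handle through unique_id_mapping;
// see the TODO below about whether a reverse mapping is actually what is wanted here.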
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex,
                                                                   uint32_t *pDisplayCount, VkDisplayKHR *pDisplays) {
    instance_layer_data *my_map_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
    VkResult result =
        my_map_data->dispatch_table.GetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays);
    if (VK_SUCCESS == result) {
        if ((*pDisplayCount > 0) && pDisplays) {
            std::lock_guard<std::mutex> lock(global_lock);
            for (uint32_t i = 0; i < *pDisplayCount; i++) {
                // TODO: this looks like it really wants a /reverse/ mapping. What's going on here?
                auto it = unique_id_mapping.find(reinterpret_cast<const uint64_t &>(pDisplays[i]));
                assert(it != unique_id_mapping.end());
                pDisplays[i] = reinterpret_cast<VkDisplayKHR &>(it->second);
            }
        }
    }
    return result;
}

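// The display handle supplied by the application is unwrapped before calling down, and each display mode handle
// returned by the driver is wrapped before the properties are returned.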
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display,
                                                           uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties) {
    instance_layer_data *my_map_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        display = Unwrap(display);
    }

    VkResult result = my_map_data->dispatch_table.GetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties);
    if (result == VK_SUCCESS && pProperties) {
        std::lock_guard<std::mutex> lock(global_lock);
        for (uint32_t idx0 = 0; idx0 < *pPropertyCount; ++idx0) {
            pProperties[idx0].displayMode = WrapNew(pProperties[idx0].displayMode);
        }
    }
    return result;
}

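// Only the incoming display mode handle needs unwrapping here; the returned capabilities structure contains no
// handles to wrap.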
VKAPI_ATTR VkResult VKAPI_CALL GetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode,
                                                              uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities) {
    instance_layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(physicalDevice), instance_layer_data_map);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        mode = Unwrap(mode);
    }
    VkResult result = dev_data->dispatch_table.GetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities);
    return result;
}
#endif

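// The debug marker tag/name structures carry an object handle as a raw uint64_t, so the layer makes a deep copy of
// the input structure and, if that handle is one of this layer's wrapped IDs, substitutes the driver's handle before
// calling down the chain.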
VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    // Deep-copy the input so the application's structure is never modified.
    auto local_tag_info = new safe_VkDebugMarkerObjectTagInfoEXT(pTagInfo);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_tag_info->object));
        if (it != unique_id_mapping.end()) {
            local_tag_info->object = it->second;
        }
    }
    VkResult result = device_data->dispatch_table.DebugMarkerSetObjectTagEXT(
        device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>(local_tag_info));
    // Release the shadow copy now that the call down the chain has returned.
    delete local_tag_info;
    return result;
}

VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo) {
    layer_data *device_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
    // Deep-copy the input so the application's structure is never modified.
    auto local_name_info = new safe_VkDebugMarkerObjectNameInfoEXT(pNameInfo);
    {
        std::lock_guard<std::mutex> lock(global_lock);
        auto it = unique_id_mapping.find(reinterpret_cast<uint64_t &>(local_name_info->object));
        if (it != unique_id_mapping.end()) {
            local_name_info->object = it->second;
        }
    }
    VkResult result = device_data->dispatch_table.DebugMarkerSetObjectNameEXT(
        device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>(local_name_info));
    // Release the shadow copy now that the call down the chain has returned.
    delete local_name_info;
    return result;
}

}  // namespace unique_objects

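// The unadorned, C-linkage functions below are the symbols the Vulkan loader resolves directly from the layer
// library; each simply forwards into the unique_objects namespace implementation.
// For illustration only, a loader-side lookup might resemble this hypothetical sketch (names assumed, not part of
// this file):
//     auto gipa = (PFN_vkGetInstanceProcAddr)dlsym(layer_library_handle, "vkGetInstanceProcAddr");
//     auto create_device = (PFN_vkCreateDevice)gipa(instance, "vkCreateDevice");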
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pCount,
                                                                                      VkExtensionProperties *pProperties) {
    return unique_objects::EnumerateInstanceExtensionProperties(pLayerName, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pCount,
                                                                                  VkLayerProperties *pProperties) {
    return unique_objects::EnumerateInstanceLayerProperties(pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pCount,
                                                                                VkLayerProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceLayerProperties(VK_NULL_HANDLE, pCount, pProperties);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
    return unique_objects::GetDeviceProcAddr(dev, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
    return unique_objects::GetInstanceProcAddr(instance, funcName);
}

VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                                                                    const char *pLayerName, uint32_t *pCount,
                                                                                    VkExtensionProperties *pProperties) {
    assert(physicalDevice == VK_NULL_HANDLE);
    return unique_objects::EnumerateDeviceExtensionProperties(VK_NULL_HANDLE, pLayerName, pCount, pProperties);
}

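// Loader/layer-interface hook for resolving physical-device-level entry points that are not exposed through
// vkGetInstanceProcAddr.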
VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_layerGetPhysicalDeviceProcAddr(VkInstance instance,
                                                                                           const char *funcName) {
    return unique_objects::GetPhysicalDeviceProcAddr(instance, funcName);
}

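// Interface-version negotiation with the loader: when the loader's requested interface version is at least 2, the
// layer publishes its three proc-addr entry points; the agreed version is effectively the lower of the loader's
// request and CURRENT_LOADER_LAYER_INTERFACE_VERSION.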
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
    assert(pVersionStruct != NULL);
    assert(pVersionStruct->sType == LAYER_NEGOTIATE_INTERFACE_STRUCT);

    // Fill in the function pointers if our version is at least capable of having the structure contain them.
    if (pVersionStruct->loaderLayerInterfaceVersion >= 2) {
        pVersionStruct->pfnGetInstanceProcAddr = vkGetInstanceProcAddr;
        pVersionStruct->pfnGetDeviceProcAddr = vkGetDeviceProcAddr;
        pVersionStruct->pfnGetPhysicalDeviceProcAddr = vk_layerGetPhysicalDeviceProcAddr;
    }

    if (pVersionStruct->loaderLayerInterfaceVersion < CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
        unique_objects::loader_layer_if_version = pVersionStruct->loaderLayerInterfaceVersion;
    } else if (pVersionStruct->loaderLayerInterfaceVersion > CURRENT_LOADER_LAYER_INTERFACE_VERSION) {
        pVersionStruct->loaderLayerInterfaceVersion = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
    }

    return VK_SUCCESS;
}