/*
 *
 * Copyright (C) 2015 Google, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <inttypes.h>
#include <assert.h>

#include "vulkan/vulkan.h"
#include "vk_loader_platform.h"

#include <vector>
#include <unordered_map>

#include "vulkan/vk_layer.h"
#include "vk_layer_config.h"
#include "vk_layer_table.h"
#include "vk_layer_data.h"
#include "vk_layer_logging.h"
#include "vk_layer_extension_utils.h"

struct layer_data {
    bool wsi_enabled;

    layer_data() :
        wsi_enabled(false)
    {};
};

struct instExts {
    bool wsi_enabled;
    bool xlib_enabled;
    bool xcb_enabled;
    bool wayland_enabled;
    bool mir_enabled;
    bool android_enabled;
    bool win32_enabled;
};

static std::unordered_map<void*, struct instExts> instanceExtMap;
static std::unordered_map<void*, layer_data *> layer_data_map;
static device_table_map unique_objects_device_table_map;
static instance_table_map unique_objects_instance_table_map;
// Structure to wrap returned non-dispatchable objects to guarantee they have unique handles;
// the address of the struct will be used as the unique handle
struct VkUniqueObject
{
    uint64_t actualObject;
};
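
// Illustrative sketch only: these helpers are not used by the functions below and
// their names are placeholders. They simply document the wrap/unwrap idiom that
// every explicit_* entry point in this file repeats inline.
template <typename HandleType>
static HandleType unique_wrap(HandleType driverHandle)
{
    // Allocate a layer-owned wrapper, remember the driver's handle, and hand the
    // wrapper's address back to the application as its handle
    VkUniqueObject* pWrapper = new VkUniqueObject();
    pWrapper->actualObject = (uint64_t)driverHandle;
    return (HandleType)pWrapper;
}

template <typename HandleType>
static HandleType unique_unwrap(HandleType appHandle)
{
    // Recover the driver's handle from a wrapped application handle before
    // passing it down the dispatch chain
    return (HandleType)((VkUniqueObject*)appHandle)->actualObject;
}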

// Handle CreateInstance
static void createInstanceRegisterExtensions(const VkInstanceCreateInfo* pCreateInfo, VkInstance instance)
{
    uint32_t i;
    VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(unique_objects_instance_table_map, instance);
    PFN_vkGetInstanceProcAddr gpa = pDisp->GetInstanceProcAddr;
    pDisp->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
    pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
    pDisp->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
    pDisp->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
#ifdef VK_USE_PLATFORM_WIN32_KHR
    pDisp->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
    pDisp->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
    pDisp->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
    pDisp->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
    pDisp->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
    pDisp->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
    pDisp->CreateMirSurfaceKHR = (PFN_vkCreateMirSurfaceKHR) gpa(instance, "vkCreateMirSurfaceKHR");
    pDisp->GetPhysicalDeviceMirPresentationSupportKHR = (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
    pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
    pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
#endif // VK_USE_PLATFORM_ANDROID_KHR

    instanceExtMap[pDisp] = {};
    for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].wsi_enabled = true;
#ifdef VK_USE_PLATFORM_XLIB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].xlib_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].xcb_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].wayland_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].mir_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].android_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0)
            instanceExtMap[pDisp].win32_enabled = true;
#endif
    }
}
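
// Example of the bookkeeping above (the extension names are real Vulkan identifiers,
// the scenario itself is hypothetical): an application that calls vkCreateInstance with
// ppEnabledExtensionNames = { "VK_KHR_surface", "VK_KHR_xcb_surface" } leaves this
// instance's entry in instanceExtMap with wsi_enabled and xcb_enabled set to true and
// every other platform flag still false.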

VkResult
explicit_CreateInstance(
    const VkInstanceCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkInstance *pInstance)
{
    VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
    if (fpCreateInstance == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
    if (result != VK_SUCCESS) {
        return result;
    }

    initInstanceTable(*pInstance, fpGetInstanceProcAddr, unique_objects_instance_table_map);

    createInstanceRegisterExtensions(pCreateInfo, *pInstance);

    return result;
}
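
// The trampoline above is the standard layer chaining pattern: fetch the next entity's
// vkGetInstanceProcAddr from the VK_LAYER_LINK_INFO chain, resolve the downstream
// vkCreateInstance through it, advance chain_info->u.pLayerInfo so the next layer finds
// its own link, then call down and build this layer's instance dispatch table.
// explicit_CreateDevice below follows the same pattern at device scope.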

// Handle CreateDevice
static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
{
    layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
    VkLayerDispatchTable *pDisp = get_dispatch_table(unique_objects_device_table_map, device);
    PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
    pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
    pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
    pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
    pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
    pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
    my_device_data->wsi_enabled = false;
    for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
        if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
            my_device_data->wsi_enabled = true;
    }
}

VkResult
explicit_CreateDevice(
    VkPhysicalDevice gpu,
    const VkDeviceCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkDevice *pDevice)
{
    VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);

    assert(chain_info->u.pLayerInfo);
    PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
    PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
    PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
    if (fpCreateDevice == NULL) {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Advance the link info for the next element on the chain
    chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

    VkResult result = fpCreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
    if (result != VK_SUCCESS) {
        return result;
    }

    // Setup layer's device dispatch table
    initDeviceTable(*pDevice, fpGetDeviceProcAddr, unique_objects_device_table_map);

    createDeviceRegisterExtensions(pCreateInfo, *pDevice);

    return result;
}

VkResult explicit_QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
{
// UNWRAP USES:
// 0 : fence,VkFence
    if (VK_NULL_HANDLE != fence) {
        fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
    }
// waitSemaphoreCount : pSubmits[submitCount]->pWaitSemaphores,VkSemaphore
    std::vector<VkSemaphore> original_pWaitSemaphores = {};
// signalSemaphoreCount : pSubmits[submitCount]->pSignalSemaphores,VkSemaphore
    std::vector<VkSemaphore> original_pSignalSemaphores = {};
    if (pSubmits) {
        for (uint32_t index0=0; index0<submitCount; ++index0) {
            if (pSubmits[index0].pWaitSemaphores) {
                for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
                    // Cast away the const on the semaphore array so the wrapped handles can be
                    // swapped for the driver's handles in place
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
                    original_pWaitSemaphores.push_back(pSubmits[index0].pWaitSemaphores[index1]);
                    (*ppSemaphore)[index1] = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pWaitSemaphores[index1])->actualObject;
                }
            }
            if (pSubmits[index0].pSignalSemaphores) {
                for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
                    original_pSignalSemaphores.push_back(pSubmits[index0].pSignalSemaphores[index1]);
                    (*ppSemaphore)[index1] = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pSignalSemaphores[index1])->actualObject;
                }
            }
        }
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
    // Restore the application's wrapped handles; the saved vectors are flat across all
    // submits, so walk them with running indices rather than re-using index1
    uint32_t wait_sem_index = 0;
    uint32_t signal_sem_index = 0;
    if (pSubmits) {
        for (uint32_t index0=0; index0<submitCount; ++index0) {
            if (pSubmits[index0].pWaitSemaphores) {
                for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
                    (*ppSemaphore)[index1] = original_pWaitSemaphores[wait_sem_index++];
                }
            }
            if (pSubmits[index0].pSignalSemaphores) {
                for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
                    (*ppSemaphore)[index1] = original_pSignalSemaphores[signal_sem_index++];
                }
            }
        }
    }
    return result;
}
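
// The unwrap/call/restore shape above (and in explicit_QueueBindSparse below) exists
// because the layer owns no copy of the application's structs: the wrapped handles in
// the const input arrays are temporarily overwritten with driver handles for the
// down-chain call, then written back so the application's view of its own structures
// is unchanged after the call returns.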

VkResult explicit_QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
{
// UNWRAP USES:
// 0 : pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->buffer,VkBuffer, pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->pBinds[bindCount]->memory,VkDeviceMemory
    std::vector<VkBuffer> original_buffer = {};
    std::vector<VkDeviceMemory> original_memory1 = {};
    std::vector<VkImage> original_image1 = {};
    std::vector<VkDeviceMemory> original_memory2 = {};
    std::vector<VkImage> original_image2 = {};
    std::vector<VkDeviceMemory> original_memory3 = {};
    std::vector<VkSemaphore> original_pWaitSemaphores = {};
    std::vector<VkSemaphore> original_pSignalSemaphores = {};
    if (pBindInfo) {
        for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
            if (pBindInfo[index0].pBufferBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
                    if (pBindInfo[index0].pBufferBinds[index1].buffer) {
                        VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
                        original_buffer.push_back(pBindInfo[index0].pBufferBinds[index1].buffer);
                        *(pBuffer) = (VkBuffer)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].buffer)->actualObject;
                    }
                    if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
                                original_memory1.push_back(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory)->actualObject;
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pImageOpaqueBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
                    if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
                        VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
                        original_image1.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].image);
                        *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].image)->actualObject;
                    }
                    if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
                                original_memory2.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory)->actualObject;
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pImageBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
                    if (pBindInfo[index0].pImageBinds[index1].image) {
                        VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
                        original_image2.push_back(pBindInfo[index0].pImageBinds[index1].image);
                        *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].image)->actualObject;
                    }
                    if (pBindInfo[index0].pImageBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
                                original_memory3.push_back(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory)->actualObject;
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pWaitSemaphores) {
                for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
                    original_pWaitSemaphores.push_back(pBindInfo[index0].pWaitSemaphores[index1]);
                    (*ppSemaphore)[index1] = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pWaitSemaphores[index1])->actualObject;
                }
            }
            if (pBindInfo[index0].pSignalSemaphores) {
                for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
                    original_pSignalSemaphores.push_back(pBindInfo[index0].pSignalSemaphores[index1]);
                    (*ppSemaphore)[index1] = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pSignalSemaphores[index1])->actualObject;
                }
            }
        }
    }
    if (VK_NULL_HANDLE != fence) {
        fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
    // Restore the application's wrapped handles; the saved vectors are flat across all
    // bind infos, so walk each of them with its own running index
    uint32_t buffer_index = 0;
    uint32_t memory1_index = 0;
    uint32_t image1_index = 0;
    uint32_t memory2_index = 0;
    uint32_t image2_index = 0;
    uint32_t memory3_index = 0;
    uint32_t wait_sem_index = 0;
    uint32_t signal_sem_index = 0;
    if (pBindInfo) {
        for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
            if (pBindInfo[index0].pBufferBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
                    if (pBindInfo[index0].pBufferBinds[index1].buffer) {
                        VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
                        *(pBuffer) = original_buffer[buffer_index++];
                    }
                    if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = original_memory1[memory1_index++];
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pImageOpaqueBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
                    if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
                        VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
                        *(pImage) = original_image1[image1_index++];
                    }
                    if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = original_memory2[memory2_index++];
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pImageBinds) {
                for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
                    if (pBindInfo[index0].pImageBinds[index1].image) {
                        VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
                        *(pImage) = original_image2[image2_index++];
                    }
                    if (pBindInfo[index0].pImageBinds[index1].pBinds) {
                        for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
                            if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
                                VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
                                *(pDeviceMemory) = original_memory3[memory3_index++];
                            }
                        }
                    }
                }
            }
            if (pBindInfo[index0].pWaitSemaphores) {
                for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
                    (*ppSemaphore)[index1] = original_pWaitSemaphores[wait_sem_index++];
                }
            }
            if (pBindInfo[index0].pSignalSemaphores) {
                for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
                    VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
                    (*ppSemaphore)[index1] = original_pSignalSemaphores[signal_sem_index++];
                }
            }
        }
    }
    return result;
}

VkResult explicit_CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
// UNWRAP USES:
// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->stage[0]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
    if (VK_NULL_HANDLE != pipelineCache) {
        pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
    }
    std::vector<VkShaderModule> original_module = {};
    std::vector<VkPipelineLayout> original_layout = {};
    std::vector<VkPipeline> original_basePipelineHandle = {};
    if (pCreateInfos) {
        for (uint32_t index0=0; index0<createInfoCount; ++index0) {
            if (pCreateInfos[index0].stage.module) {
                VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
                original_module.push_back(pCreateInfos[index0].stage.module);
                *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].stage.module)->actualObject;
            }
            if (pCreateInfos[index0].layout) {
                VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
                original_layout.push_back(pCreateInfos[index0].layout);
                *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
            }
            if (pCreateInfos[index0].basePipelineHandle) {
                VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
                original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
                *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
            }
        }
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    // Restore the application's wrapped handles; the saved vectors only hold entries for
    // handles that were actually unwrapped, so walk them with running indices
    uint32_t module_index = 0;
    uint32_t layout_index = 0;
    uint32_t basePipelineHandle_index = 0;
    if (pCreateInfos) {
        for (uint32_t index0=0; index0<createInfoCount; ++index0) {
            if (pCreateInfos[index0].stage.module) {
                VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
                *(pShaderModule) = original_module[module_index++];
            }
            if (pCreateInfos[index0].layout) {
                VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
                *(pPipelineLayout) = original_layout[layout_index++];
            }
            if (pCreateInfos[index0].basePipelineHandle) {
                VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
                *(pPipeline) = original_basePipelineHandle[basePipelineHandle_index++];
            }
        }
    }
    if (VK_SUCCESS == result) {
        // Wrap the newly created pipelines before returning them to the application
        VkUniqueObject* pUO = NULL;
        for (uint32_t i=0; i<createInfoCount; ++i) {
            pUO = new VkUniqueObject();
            pUO->actualObject = (uint64_t)pPipelines[i];
            pPipelines[i] = (VkPipeline)pUO;
        }
    }
    return result;
}
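
// After a successful create, the handles written to pPipelines above are wrapper
// addresses rather than driver handles; the application only ever sees the wrappers.
// The corresponding destroy path (generated elsewhere in this layer) is expected to
// unwrap and free them, so nothing is reclaimed here.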

VkResult explicit_CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
// UNWRAP USES:
// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->pStages[stageCount]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->renderPass,VkRenderPass, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
    if (VK_NULL_HANDLE != pipelineCache) {
        pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
    }
    std::vector<VkShaderModule> original_module = {};
    std::vector<VkPipelineLayout> original_layout = {};
    std::vector<VkRenderPass> original_renderPass = {};
    std::vector<VkPipeline> original_basePipelineHandle = {};
    if (pCreateInfos) {
        for (uint32_t index0=0; index0<createInfoCount; ++index0) {
            if (pCreateInfos[index0].pStages) {
                for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
                    if (pCreateInfos[index0].pStages[index1].module) {
                        VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
                        original_module.push_back(pCreateInfos[index0].pStages[index1].module);
                        *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].pStages[index1].module)->actualObject;
                    }
                }
            }
            if (pCreateInfos[index0].layout) {
                VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
                original_layout.push_back(pCreateInfos[index0].layout);
                *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
            }
            if (pCreateInfos[index0].renderPass) {
                VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
                original_renderPass.push_back(pCreateInfos[index0].renderPass);
                *(pRenderPass) = (VkRenderPass)((VkUniqueObject*)pCreateInfos[index0].renderPass)->actualObject;
            }
            if (pCreateInfos[index0].basePipelineHandle) {
                VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
                original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
                *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
            }
        }
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    // Restore the application's wrapped handles; the saved vectors are flat across all
    // create infos (and all shader stages), so walk them with running indices
    uint32_t module_index = 0;
    uint32_t layout_index = 0;
    uint32_t renderPass_index = 0;
    uint32_t basePipelineHandle_index = 0;
    if (pCreateInfos) {
        for (uint32_t index0=0; index0<createInfoCount; ++index0) {
            if (pCreateInfos[index0].pStages) {
                for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
                    if (pCreateInfos[index0].pStages[index1].module) {
                        VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
                        *(pShaderModule) = original_module[module_index++];
                    }
                }
            }
            if (pCreateInfos[index0].layout) {
                VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
                *(pPipelineLayout) = original_layout[layout_index++];
            }
            if (pCreateInfos[index0].renderPass) {
                VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
                *(pRenderPass) = original_renderPass[renderPass_index++];
            }
            if (pCreateInfos[index0].basePipelineHandle) {
                VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
                *(pPipeline) = original_basePipelineHandle[basePipelineHandle_index++];
            }
        }
    }
    if (VK_SUCCESS == result) {
        // Wrap the newly created pipelines before returning them to the application
        VkUniqueObject* pUO = NULL;
        for (uint32_t i=0; i<createInfoCount; ++i) {
            pUO = new VkUniqueObject();
            pUO->actualObject = (uint64_t)pPipelines[i];
            pPipelines[i] = (VkPipeline)pUO;
        }
    }
    return result;
}

VkResult explicit_GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages)
{
// UNWRAP USES:
// 0 : swapchain,VkSwapchainKHR, pSwapchainImages,VkImage
    if (VK_NULL_HANDLE != swapchain) {
        swapchain = (VkSwapchainKHR)((VkUniqueObject*)swapchain)->actualObject;
    }
    VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
    // TODO : Need to add corresponding code to delete these images
    if (VK_SUCCESS == result) {
        if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
            std::vector<VkUniqueObject*> uniqueImages = {};
            for (uint32_t i=0; i<*pSwapchainImageCount; ++i) {
                uniqueImages.push_back(new VkUniqueObject());
                uniqueImages[i]->actualObject = (uint64_t)pSwapchainImages[i];
                pSwapchainImages[i] = (VkImage)uniqueImages[i];
            }
        }
    }
    return result;
}
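
// The swapchain images returned here are wrapped the same way the pipelines are above;
// as the TODO notes, nothing in this version of the layer reclaims these wrappers yet,
// so they appear to persist until the process exits.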