blob: 68505e283f2240774391cdd074cb2e78861c8a8b [file] [log] [blame]
Tobin Ehlis0b99d032015-12-08 10:50:10 -07001/*
2 *
3 * Copyright (C) 2015 Google, Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included
13 * in all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 *
23 * Author: Tobin Ehlis <tobine@google.com>
24 */
25
Tobin Ehlis0b99d032015-12-08 10:50:10 -070026#include <stdio.h>
27#include <stdlib.h>
28#include <string.h>
29#include <inttypes.h>
30
31#include "vulkan/vulkan.h"
32#include "vk_loader_platform.h"
33
34#include <vector>
35#include <unordered_map>
36
37#include "vulkan/vk_layer.h"
38#include "vk_layer_config.h"
Tobin Ehlis0b99d032015-12-08 10:50:10 -070039#include "vk_layer_table.h"
40#include "vk_layer_data.h"
41#include "vk_layer_logging.h"
42#include "vk_layer_extension_utils.h"
43
// Per-device state tracked by this layer.
struct layer_data {
    // True once VK_KHR_swapchain has been enabled on the device.
    bool wsi_enabled = false;

    layer_data() = default;
};
51
// Records which WSI-related instance extensions the application enabled.
// One entry exists per instance in instanceExtMap.
// All flags default to false so an entry is valid even if it is ever
// default-constructed rather than value-initialized (e.g. via map[] access).
struct instExts {
    bool wsi_enabled = false;
    bool xlib_enabled = false;
    bool xcb_enabled = false;
    bool wayland_enabled = false;
    bool mir_enabled = false;
    bool android_enabled = false;
    bool win32_enabled = false;
};
61
// Per-instance record of enabled WSI extensions, keyed by the instance's
// dispatch table address (see createInstanceRegisterExtensions).
static std::unordered_map<void*, struct instExts> instanceExtMap;
// Per-device layer_data, keyed by dispatch key.
static std::unordered_map<void*, layer_data *> layer_data_map;
// Layer dispatch tables for devices and instances handled by this layer.
static device_table_map unique_objects_device_table_map;
static instance_table_map unique_objects_instance_table_map;
// Structure to wrap returned non-dispatchable objects to guarantee they have unique handles
// address of struct will be used as the unique handle
struct VkUniqueObject
{
    // The driver's actual handle this wrapper stands in for.
    // Defaulted to 0 so a default-constructed wrapper is never left
    // holding an indeterminate value.
    uint64_t actualObject = 0;
};
72
Tobin Ehlis0b99d032015-12-08 10:50:10 -070073// Handle CreateInstance
74static void createInstanceRegisterExtensions(const VkInstanceCreateInfo* pCreateInfo, VkInstance instance)
75{
76 uint32_t i;
77 VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(unique_objects_instance_table_map, instance);
78 PFN_vkGetInstanceProcAddr gpa = pDisp->GetInstanceProcAddr;
79 pDisp->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
80 pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
81 pDisp->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
82 pDisp->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
Tobin Ehlis115790b2016-01-05 16:34:59 -070083#ifdef VK_USE_PLATFORM_WIN32_KHR
Tobin Ehlis0b99d032015-12-08 10:50:10 -070084 pDisp->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
85 pDisp->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
86#endif // VK_USE_PLATFORM_WIN32_KHR
87#ifdef VK_USE_PLATFORM_XCB_KHR
88 pDisp->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
89 pDisp->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
90#endif // VK_USE_PLATFORM_XCB_KHR
91#ifdef VK_USE_PLATFORM_XLIB_KHR
92 pDisp->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
93 pDisp->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
94#endif // VK_USE_PLATFORM_XLIB_KHR
95#ifdef VK_USE_PLATFORM_MIR_KHR
96 pDisp->CreateMirSurfaceKHR = (PFN_vkCreateMirSurfaceKHR) gpa(instance, "vkCreateMirSurfaceKHR");
97 pDisp->GetPhysicalDeviceMirPresentationSupportKHR = (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
98#endif // VK_USE_PLATFORM_MIR_KHR
99#ifdef VK_USE_PLATFORM_WAYLAND_KHR
100 pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
101 pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
102#endif // VK_USE_PLATFORM_WAYLAND_KHR
103#ifdef VK_USE_PLATFORM_ANDROID_KHR
104 pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
105#endif // VK_USE_PLATFORM_ANDROID_KHR
106
Tobin Ehlis115790b2016-01-05 16:34:59 -0700107 instanceExtMap[pDisp] = {};
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700108 for (i = 0; i < pCreateInfo->enabledExtensionNameCount; i++) {
109 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0)
110 instanceExtMap[pDisp].wsi_enabled = true;
Tobin Ehlis115790b2016-01-05 16:34:59 -0700111#ifdef VK_USE_PLATFORM_XLIB_KHR
112 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0)
113 instanceExtMap[pDisp].xlib_enabled = true;
114#endif
115#ifdef VK_USE_PLATFORM_XCB_KHR
116 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0)
117 instanceExtMap[pDisp].xcb_enabled = true;
118#endif
119#ifdef VK_USE_PLATFORM_WAYLAND_KHR
120 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0)
121 instanceExtMap[pDisp].wayland_enabled = true;
122#endif
123#ifdef VK_USE_PLATFORM_MIR_KHR
124 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0)
125 instanceExtMap[pDisp].mir_enabled = true;
126#endif
127#ifdef VK_USE_PLATFORM_ANDROID_KHR
128 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0)
129 instanceExtMap[pDisp].android_enabled = true;
130#endif
131#ifdef VK_USE_PLATFORM_WIN32_KHR
132 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0)
133 instanceExtMap[pDisp].win32_enabled = true;
134#endif
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700135 }
136}
137
138VkResult
139explicit_CreateInstance(
140 const VkInstanceCreateInfo *pCreateInfo,
141 const VkAllocationCallbacks *pAllocator,
142 VkInstance *pInstance)
143{
144
145 VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(unique_objects_instance_table_map, *pInstance);
146 VkResult result = pInstanceTable->CreateInstance(pCreateInfo, pAllocator, pInstance);
147
148 if (result == VK_SUCCESS) {
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700149 createInstanceRegisterExtensions(pCreateInfo, *pInstance);
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700150 }
151 return result;
152}
153
154// Handle CreateDevice
155static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
156{
157 layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
158 VkLayerDispatchTable *pDisp = get_dispatch_table(unique_objects_device_table_map, device);
159 PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
160 pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
161 pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
162 pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
163 pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
164 pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
165 my_device_data->wsi_enabled = false;
166 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionNameCount; i++) {
167 if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
168 my_device_data->wsi_enabled = true;
169 }
170}
171
172VkResult
173explicit_CreateDevice(
174 VkPhysicalDevice gpu,
175 const VkDeviceCreateInfo *pCreateInfo,
176 const VkAllocationCallbacks *pAllocator,
177 VkDevice *pDevice)
178{
179 VkLayerDispatchTable *pDeviceTable = get_dispatch_table(unique_objects_device_table_map, *pDevice);
180 VkResult result = pDeviceTable->CreateDevice(gpu, pCreateInfo, pAllocator, pDevice);
181 if (result == VK_SUCCESS) {
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700182 createDeviceRegisterExtensions(pCreateInfo, *pDevice);
183 }
184 return result;
185}
186
187VkResult explicit_QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
188{
189// UNWRAP USES:
190// 0 : fence,VkFence
191 if (VK_NULL_HANDLE != fence) {
192 fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
193 }
194// waitSemaphoreCount : pSubmits[submitCount]->pWaitSemaphores,VkSemaphore
195 std::vector<VkSemaphore> original_pWaitSemaphores = {};
196// signalSemaphoreCount : pSubmits[submitCount]->pSignalSemaphores,VkSemaphore
197 std::vector<VkSemaphore> original_pSignalSemaphores = {};
198 if (pSubmits) {
199 for (uint32_t index0=0; index0<submitCount; ++index0) {
200 if (pSubmits[index0].pWaitSemaphores) {
201 for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
202 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
203 original_pWaitSemaphores.push_back(pSubmits[index0].pWaitSemaphores[index1]);
204 *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pWaitSemaphores[index1])->actualObject;
205 }
206 }
207 if (pSubmits[index0].pSignalSemaphores) {
208 for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
209 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
210 original_pSignalSemaphores.push_back(pSubmits[index0].pSignalSemaphores[index1]);
211 *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pSignalSemaphores[index1])->actualObject;
212 }
213 }
214 }
215 }
216 VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
217 if (pSubmits) {
218 for (uint32_t index0=0; index0<submitCount; ++index0) {
219 if (pSubmits[index0].pWaitSemaphores) {
220 for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
221 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
222 *(ppSemaphore[index1]) = original_pWaitSemaphores[index1];
223 }
224 }
225 if (pSubmits[index0].pSignalSemaphores) {
226 for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
227 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
228 *(ppSemaphore[index1]) = original_pSignalSemaphores[index1];
229 }
230 }
231 }
232 }
233 return result;
234}
235
236VkResult explicit_QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
237{
238// UNWRAP USES:
239// 0 : pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->buffer,VkBuffer, pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->pBinds[bindCount]->memory,VkDeviceMemory
240 std::vector<VkBuffer> original_buffer = {};
241 std::vector<VkDeviceMemory> original_memory1 = {};
242 std::vector<VkImage> original_image1 = {};
243 std::vector<VkDeviceMemory> original_memory2 = {};
244 std::vector<VkImage> original_image2 = {};
245 std::vector<VkDeviceMemory> original_memory3 = {};
246 std::vector<VkSemaphore> original_pWaitSemaphores = {};
247 std::vector<VkSemaphore> original_pSignalSemaphores = {};
248 if (pBindInfo) {
249 for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
250 if (pBindInfo[index0].pBufferBinds) {
251 for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
252 if (pBindInfo[index0].pBufferBinds[index1].buffer) {
253 VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
254 original_buffer.push_back(pBindInfo[index0].pBufferBinds[index1].buffer);
255 *(pBuffer) = (VkBuffer)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].buffer)->actualObject;
256 }
257 if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
258 for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
259 if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
260 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
261 original_memory1.push_back(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
262 *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory)->actualObject;
263 }
264 }
265 }
266 }
267 }
268 if (pBindInfo[index0].pImageOpaqueBinds) {
269 for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
270 if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
271 VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
272 original_image1.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].image);
273 *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].image)->actualObject;
274 }
275 if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
276 for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
277 if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
278 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
279 original_memory2.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
280 *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory)->actualObject;
281 }
282 }
283 }
284 }
285 }
286 if (pBindInfo[index0].pImageBinds) {
287 for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
288 if (pBindInfo[index0].pImageBinds[index1].image) {
289 VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
290 original_image2.push_back(pBindInfo[index0].pImageBinds[index1].image);
291 *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].image)->actualObject;
292 }
293 if (pBindInfo[index0].pImageBinds[index1].pBinds) {
294 for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
295 if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
296 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
297 original_memory3.push_back(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
298 *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory)->actualObject;
299 }
300 }
301 }
302 }
303 }
304 if (pBindInfo[index0].pWaitSemaphores) {
305 for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
306 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
307 original_pWaitSemaphores.push_back(pBindInfo[index0].pWaitSemaphores[index1]);
308 *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pWaitSemaphores[index1])->actualObject;
309 }
310 }
311 if (pBindInfo[index0].pSignalSemaphores) {
312 for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
313 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
314 original_pSignalSemaphores.push_back(pBindInfo[index0].pSignalSemaphores[index1]);
315 *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pSignalSemaphores[index1])->actualObject;
316 }
317 }
318 }
319 }
320 if (VK_NULL_HANDLE != fence) {
321 fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
322 }
323 VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
324 if (pBindInfo) {
325 for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
326 if (pBindInfo[index0].pBufferBinds) {
327 for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
328 if (pBindInfo[index0].pBufferBinds[index1].buffer) {
329 VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
330 *(pBuffer) = original_buffer[index1];
331 }
332 if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
333 for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
334 if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
335 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
336 *(pDeviceMemory) = original_memory1[index2];
337 }
338 }
339 }
340 }
341 }
342 if (pBindInfo[index0].pImageOpaqueBinds) {
343 for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
344 if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
345 VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
346 *(pImage) = original_image1[index1];
347 }
348 if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
349 for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
350 if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
351 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
352 *(pDeviceMemory) = original_memory2[index2];
353 }
354 }
355 }
356 }
357 }
358 if (pBindInfo[index0].pImageBinds) {
359 for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
360 if (pBindInfo[index0].pImageBinds[index1].image) {
361 VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
362 *(pImage) = original_image2[index1];
363 }
364 if (pBindInfo[index0].pImageBinds[index1].pBinds) {
365 for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
366 if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
367 VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
368 *(pDeviceMemory) = original_memory3[index2];
369 }
370 }
371 }
372 }
373 }
374 if (pBindInfo[index0].pWaitSemaphores) {
375 for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
376 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
377 *(ppSemaphore[index1]) = original_pWaitSemaphores[index1];
378 }
379 }
380 if (pBindInfo[index0].pSignalSemaphores) {
381 for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
382 VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
383 *(ppSemaphore[index1]) = original_pSignalSemaphores[index1];
384 }
385 }
386 }
387 }
388 return result;
389}
390
391VkResult explicit_CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
392{
393// UNWRAP USES:
394// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->stage[0]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
395 if (VK_NULL_HANDLE != pipelineCache) {
396 pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
397 }
398 std::vector<VkShaderModule> original_module = {};
399 std::vector<VkPipelineLayout> original_layout = {};
400 std::vector<VkPipeline> original_basePipelineHandle = {};
401 if (pCreateInfos) {
402 for (uint32_t index0=0; index0<createInfoCount; ++index0) {
403 if (pCreateInfos[index0].stage.module) {
404 VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
405 original_module.push_back(pCreateInfos[index0].stage.module);
406 *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].stage.module)->actualObject;
407 }
408 if (pCreateInfos[index0].layout) {
409 VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
410 original_layout.push_back(pCreateInfos[index0].layout);
411 *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
412 }
413 if (pCreateInfos[index0].basePipelineHandle) {
414 VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
415 original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
416 *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
417 }
418 }
419 }
420 VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
421 if (pCreateInfos) {
422 for (uint32_t index0=0; index0<createInfoCount; ++index0) {
423 if (pCreateInfos[index0].stage.module) {
424 VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
425 *(pShaderModule) = original_module[index0];
426 }
427 if (pCreateInfos[index0].layout) {
428 VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
429 *(pPipelineLayout) = original_layout[index0];
430 }
431 if (pCreateInfos[index0].basePipelineHandle) {
432 VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
433 *(pPipeline) = original_basePipelineHandle[index0];
434 }
435 }
436 }
437 if (VK_SUCCESS == result) {
Tobin Ehlis115790b2016-01-05 16:34:59 -0700438 VkUniqueObject* pUO = NULL;
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700439 for (uint32_t i=0; i<createInfoCount; ++i) {
Tobin Ehlis115790b2016-01-05 16:34:59 -0700440 pUO = new VkUniqueObject();
441 pUO->actualObject = (uint64_t)pPipelines[i];
442 pPipelines[i] = (VkPipeline)pUO;
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700443 }
444 }
445 return result;
446}
447
448VkResult explicit_CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
449{
450// UNWRAP USES:
451// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->pStages[stageCount]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->renderPass,VkRenderPass, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
452 if (VK_NULL_HANDLE != pipelineCache) {
453 pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
454 }
455 std::vector<VkShaderModule> original_module = {};
456 std::vector<VkPipelineLayout> original_layout = {};
457 std::vector<VkRenderPass> original_renderPass = {};
458 std::vector<VkPipeline> original_basePipelineHandle = {};
459 if (pCreateInfos) {
460 for (uint32_t index0=0; index0<createInfoCount; ++index0) {
461 if (pCreateInfos[index0].pStages) {
462 for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
463 if (pCreateInfos[index0].pStages[index1].module) {
464 VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
465 original_module.push_back(pCreateInfos[index0].pStages[index1].module);
466 *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].pStages[index1].module)->actualObject;
467 }
468 }
469 }
470 if (pCreateInfos[index0].layout) {
471 VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
472 original_layout.push_back(pCreateInfos[index0].layout);
473 *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
474 }
475 if (pCreateInfos[index0].renderPass) {
476 VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
477 original_renderPass.push_back(pCreateInfos[index0].renderPass);
478 *(pRenderPass) = (VkRenderPass)((VkUniqueObject*)pCreateInfos[index0].renderPass)->actualObject;
479 }
480 if (pCreateInfos[index0].basePipelineHandle) {
481 VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
482 original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
483 *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
484 }
485 }
486 }
487 VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
488 if (pCreateInfos) {
489 for (uint32_t index0=0; index0<createInfoCount; ++index0) {
490 if (pCreateInfos[index0].pStages) {
491 for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
492 if (pCreateInfos[index0].pStages[index1].module) {
493 VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
494 *(pShaderModule) = original_module[index1];
495 }
496 }
497 }
498 if (pCreateInfos[index0].layout) {
499 VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
500 *(pPipelineLayout) = original_layout[index0];
501 }
502 if (pCreateInfos[index0].renderPass) {
503 VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
504 *(pRenderPass) = original_renderPass[index0];
505 }
506 if (pCreateInfos[index0].basePipelineHandle) {
507 VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
508 *(pPipeline) = original_basePipelineHandle[index0];
509 }
510 }
511 }
512 if (VK_SUCCESS == result) {
Tobin Ehlis115790b2016-01-05 16:34:59 -0700513 VkUniqueObject* pUO = NULL;
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700514 for (uint32_t i=0; i<createInfoCount; ++i) {
Tobin Ehlis115790b2016-01-05 16:34:59 -0700515 pUO = new VkUniqueObject();
516 pUO->actualObject = (uint64_t)pPipelines[i];
517 pPipelines[i] = (VkPipeline)pUO;
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700518 }
519 }
520 return result;
521}
522
Tobin Ehlis0b99d032015-12-08 10:50:10 -0700523VkResult explicit_GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages)
524{
525// UNWRAP USES:
526// 0 : swapchain,VkSwapchainKHR, pSwapchainImages,VkImage
527 if (VK_NULL_HANDLE != swapchain) {
528 swapchain = (VkSwapchainKHR)((VkUniqueObject*)swapchain)->actualObject;
529 }
530 VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
531 // TODO : Need to add corresponding code to delete these images
532 if (VK_SUCCESS == result) {
533 if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
534 std::vector<VkUniqueObject*> uniqueImages = {};
535 for (uint32_t i=0; i<*pSwapchainImageCount; ++i) {
536 uniqueImages.push_back(new VkUniqueObject());
537 uniqueImages[i]->actualObject = (uint64_t)pSwapchainImages[i];
538 pSwapchainImages[i] = (VkImage)uniqueImages[i];
539 }
540 }
541 }
542 return result;
543}