blob: 9447e01b12b317572b1426ecd428084215d1e944 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
44 for (const auto &extensionName : enabledExtensionNames)
45 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
Jamie Madill0448ec82016-12-23 13:41:47 -050055VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
56 VkDebugReportObjectTypeEXT objectType,
57 uint64_t object,
58 size_t location,
59 int32_t messageCode,
60 const char *layerPrefix,
61 const char *message,
62 void *userData)
63{
64 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
65 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050066 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050067#if !defined(NDEBUG)
68 // Abort the call in Debug builds.
69 return VK_TRUE;
70#endif
71 }
72 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
73 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050074 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050075 }
76 else
77 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050078 // Uncomment this if you want Vulkan spam.
79 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050080 }
81
82 return VK_FALSE;
83}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
// Default-constructs the renderer with every Vulkan handle null; real setup is
// deferred to initialize().
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      // max() is the "no queue family chosen yet" sentinel; set in initializeDevice().
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      // max() is the "no host-visible memory type found yet" sentinel; set in initialize().
      mHostVisibleMemoryIndex(std::numeric_limits<uint32_t>::max()),
      mGlslangWrapper(nullptr),
      // Two serials are generated up front - presumably the factory is monotonic so the
      // "last completed" serial precedes the "current" one; see SerialFactory.
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
103
// Tears down all Vulkan objects. Order matters: pending GPU work is drained first,
// then child objects (command buffer/pool) are destroyed before the device, and the
// debug callback before the instance that created it.
RendererVk::~RendererVk()
{
    // If any work is still tracked, wait for the queue to go idle and free it.
    if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
    {
        vk::Error error = finish();
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    // Drop our reference on the shared GlslangWrapper singleton.
    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    if (mCommandBuffer.valid())
    {
        mCommandBuffer.destroy(mDevice);
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // The destroy function is an extension entry point and must be fetched dynamically.
    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device handle is owned by the instance; just clear our copy.
    mPhysicalDevice = VK_NULL_HANDLE;
}
154
Frank Henigman29f148b2016-11-23 21:05:36 -0500155vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500156{
Jamie Madill222c5172017-07-19 16:15:42 -0400157 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500158
159 // If we're loading the validation layers, we could be running from any random directory.
160 // Change to the executable directory so we can find the layers, then change back to the
161 // previous directory to be safe we don't disrupt the application.
162 std::string previousCWD;
163
164 if (mEnableValidationLayers)
165 {
166 const auto &cwd = angle::GetCWD();
167 if (!cwd.valid())
168 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500169 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500170 mEnableValidationLayers = false;
171 }
172 else
173 {
174 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400175 const char *exeDir = angle::GetExecutableDirectory();
176 if (!angle::SetCWD(exeDir))
177 {
178 ERR() << "Error setting CWD for Vulkan layers init.";
179 mEnableValidationLayers = false;
180 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500181 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400182 }
183
184 // Override environment variable to use the ANGLE layers.
185 if (mEnableValidationLayers)
186 {
187 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
188 {
189 ERR() << "Error setting environment for Vulkan layers init.";
190 mEnableValidationLayers = false;
191 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500192 }
193
Jamie Madill0448ec82016-12-23 13:41:47 -0500194 // Gather global layer properties.
195 uint32_t instanceLayerCount = 0;
196 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
197
198 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
199 if (instanceLayerCount > 0)
200 {
201 ANGLE_VK_TRY(
202 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
203 }
204
Jamie Madille09bd5d2016-11-29 16:20:35 -0500205 uint32_t instanceExtensionCount = 0;
206 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
207
208 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
209 if (instanceExtensionCount > 0)
210 {
211 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
212 instanceExtensionProps.data()));
213 }
214
Jamie Madill0448ec82016-12-23 13:41:47 -0500215 if (mEnableValidationLayers)
216 {
217 // Verify the standard validation layers are available.
218 if (!HasStandardValidationLayer(instanceLayerProps))
219 {
220 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400221 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
222 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500223 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500224 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500225 }
226 else
227 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500228 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500229 }
230 mEnableValidationLayers = false;
231 }
232 }
233
Jamie Madille09bd5d2016-11-29 16:20:35 -0500234 std::vector<const char *> enabledInstanceExtensions;
235 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500236 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500237
Jamie Madill0448ec82016-12-23 13:41:47 -0500238 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
239 if (mEnableValidationLayers)
240 {
241 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
242 }
243
Jamie Madille09bd5d2016-11-29 16:20:35 -0500244 // Verify the required extensions are in the extension names set. Fail if not.
245 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
246
Jamie Madill327ba852016-11-30 12:38:28 -0500247 VkApplicationInfo applicationInfo;
248 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
249 applicationInfo.pNext = nullptr;
250 applicationInfo.pApplicationName = "ANGLE";
251 applicationInfo.applicationVersion = 1;
252 applicationInfo.pEngineName = "ANGLE";
253 applicationInfo.engineVersion = 1;
254 applicationInfo.apiVersion = VK_API_VERSION_1_0;
255
256 VkInstanceCreateInfo instanceInfo;
257 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
258 instanceInfo.pNext = nullptr;
259 instanceInfo.flags = 0;
260 instanceInfo.pApplicationInfo = &applicationInfo;
261
Jamie Madille09bd5d2016-11-29 16:20:35 -0500262 // Enable requested layers and extensions.
263 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
264 instanceInfo.ppEnabledExtensionNames =
265 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500266 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
267 instanceInfo.ppEnabledLayerNames =
268 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500269
270 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
271
Jamie Madill0448ec82016-12-23 13:41:47 -0500272 if (mEnableValidationLayers)
273 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500274 // Change back to the previous working directory now that we've loaded the instance -
275 // the validation layers should be loaded at this point.
276 angle::SetCWD(previousCWD.c_str());
277
Jamie Madill0448ec82016-12-23 13:41:47 -0500278 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
279
280 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
281 debugReportInfo.pNext = nullptr;
282 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
283 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
284 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
285 debugReportInfo.pfnCallback = &DebugReportCallback;
286 debugReportInfo.pUserData = this;
287
288 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
289 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
290 ASSERT(createDebugReportCallback);
291 ANGLE_VK_TRY(
292 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
293 }
294
Jamie Madill4d0bf552016-12-28 15:45:24 -0500295 uint32_t physicalDeviceCount = 0;
296 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
297 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
298
299 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
300 physicalDeviceCount = 1;
301 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
302
303 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
304
305 // Ensure we can find a graphics queue family.
306 uint32_t queueCount = 0;
307 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
308
309 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
310
311 mQueueFamilyProperties.resize(queueCount);
312 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
313 mQueueFamilyProperties.data());
314
315 size_t graphicsQueueFamilyCount = false;
316 uint32_t firstGraphicsQueueFamily = 0;
317 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
318 {
319 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
320 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
321 {
322 ASSERT(queueInfo.queueCount > 0);
323 graphicsQueueFamilyCount++;
324 if (firstGraphicsQueueFamily == 0)
325 {
326 firstGraphicsQueueFamily = familyIndex;
327 }
328 break;
329 }
330 }
331
332 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
333
334 // If only one queue family, go ahead and initialize the device. If there is more than one
335 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
336 if (graphicsQueueFamilyCount == 1)
337 {
338 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
339 }
340
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500341 VkPhysicalDeviceMemoryProperties memoryProperties;
342 vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &memoryProperties);
343
344 for (uint32_t memoryIndex = 0; memoryIndex < memoryProperties.memoryTypeCount; ++memoryIndex)
345 {
346 if ((memoryProperties.memoryTypes[memoryIndex].propertyFlags &
347 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
348 {
349 mHostVisibleMemoryIndex = memoryIndex;
350 break;
351 }
352 }
353
354 ANGLE_VK_CHECK(mHostVisibleMemoryIndex < std::numeric_limits<uint32_t>::max(),
355 VK_ERROR_INITIALIZATION_FAILED);
356
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500357 mGlslangWrapper = GlslangWrapper::GetReference();
358
Jamie Madill327ba852016-11-30 12:38:28 -0500359 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400360}
361
// Creates the logical device, retrieves its queue, and creates the command pool for
// |queueFamilyIndex|. Called from initialize() when a single graphics queue family
// exists, or from selectPresentQueueForSurface() once a present-capable family is known.
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Gather device-level layer properties (only used to check layer availability).
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Degrade gracefully to no validation if the layer is missing at device scope.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    // Swapchain support is required for presenting to window surfaces.
    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    VkDeviceQueueCreateInfo queueCreateInfo;

    // Single queue at the lowest priority; priority only matters with multiple queues.
    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                = nullptr;
    createInfo.flags                = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueCreateInfo;
    createInfo.enabledLayerCount    = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    // TODO(jmadill): Investigate transient command buffers.
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    mCommandBuffer.setCommandPool(&mCommandPool);

    return vk::NoError();
}
446
447vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
448{
449 // We've already initialized a device, and can't re-create it unless it's never been used.
450 // TODO(jmadill): Handle the re-creation case if necessary.
451 if (mDevice != VK_NULL_HANDLE)
452 {
453 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
454
455 // Check if the current device supports present on this surface.
456 VkBool32 supportsPresent = VK_FALSE;
457 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
458 surface, &supportsPresent));
459
460 return (supportsPresent == VK_TRUE);
461 }
462
463 // Find a graphics and present queue.
464 Optional<uint32_t> newPresentQueue;
465 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
466 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
467 {
468 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
469 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
470 {
471 VkBool32 supportsPresent = VK_FALSE;
472 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
473 &supportsPresent));
474
475 if (supportsPresent == VK_TRUE)
476 {
477 newPresentQueue = queueIndex;
478 break;
479 }
480 }
481 }
482
483 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
484 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
485
486 return newPresentQueue.value();
487}
488
489std::string RendererVk::getVendorString() const
490{
491 switch (mPhysicalDeviceProperties.vendorID)
492 {
493 case VENDOR_ID_AMD:
494 return "Advanced Micro Devices";
495 case VENDOR_ID_NVIDIA:
496 return "NVIDIA";
497 case VENDOR_ID_INTEL:
498 return "Intel";
499 default:
500 {
501 // TODO(jmadill): More vendor IDs.
502 std::stringstream strstr;
503 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
504 return strstr.str();
505 }
506 }
507}
508
Jamie Madille09bd5d2016-11-29 16:20:35 -0500509std::string RendererVk::getRendererDescription() const
510{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500511 std::stringstream strstr;
512
513 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
514
515 strstr << "Vulkan ";
516 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
517 strstr << VK_VERSION_MINOR(apiVersion) << ".";
518 strstr << VK_VERSION_PATCH(apiVersion);
519
520 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
521
522 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500523}
524
// Lazily populates the native caps/extensions/limitations on first access.
// const method mutating mutable members - presumably single-threaded access; the
// flag is not synchronized.
void RendererVk::ensureCapsInitialized() const
{
    if (!mCapsInitialized)
    {
        generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
        mCapsInitialized = true;
    }
}
533
// Fills in the native GL capabilities for the Vulkan back-end. Texture caps and
// limitations are not populated yet (the out-params are deliberately unnamed).
void RendererVk::generateCaps(gl::Caps *outCaps,
                              gl::TextureCapsMap * /*outTextureCaps*/,
                              gl::Extensions *outExtensions,
                              gl::Limitations * /* outLimitations */) const
{
    // TODO(jmadill): Caps.
    outCaps->maxDrawBuffers          = 1;
    outCaps->maxVertexAttributes     = gl::MAX_VERTEX_ATTRIBS;
    outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
    outCaps->maxVaryingVectors       = 16;

    // Enable this for simple buffer readback testing, but some functionality is missing.
    // TODO(jmadill): Support full mapBufferRange extension.
    outExtensions->mapBuffer      = true;
    outExtensions->mapBufferRange = true;
}
550
// Returns the native GL caps, computing them on first call.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
556
// Returns the native texture caps, computing them on first call.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
562
// Returns the native GL extensions, computing them on first call.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
568
// Returns the native GL limitations, computing them on first call.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
574
// Begins recording on the renderer's single command buffer and hands out a pointer
// to it. The caller must later pass it to one of the submit* methods to end and
// submit the recording.
vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(mCommandBuffer.begin(mDevice));
    *commandBufferOut = &mCommandBuffer;
    return vk::NoError();
}
581
Jamie Madill0c0dc342017-03-24 14:18:51 -0400582vk::Error RendererVk::submitCommandBuffer(vk::CommandBuffer *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500583{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400584 ANGLE_TRY(commandBuffer->end());
585
Jamie Madill4d0bf552016-12-28 15:45:24 -0500586 VkFenceCreateInfo fenceInfo;
587 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
588 fenceInfo.pNext = nullptr;
589 fenceInfo.flags = 0;
590
Jamie Madill4d0bf552016-12-28 15:45:24 -0500591 VkSubmitInfo submitInfo;
592 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
593 submitInfo.pNext = nullptr;
594 submitInfo.waitSemaphoreCount = 0;
595 submitInfo.pWaitSemaphores = nullptr;
596 submitInfo.pWaitDstStageMask = nullptr;
597 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400598 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500599 submitInfo.signalSemaphoreCount = 0;
600 submitInfo.pSignalSemaphores = nullptr;
601
602 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500603 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500604
Jamie Madillf651c772017-02-21 15:03:51 -0500605 return vk::NoError();
606}
607
// Convenience wrapper: submits |commandBuffer| and then blocks until the queue is
// idle, freeing all in-flight resources.
vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(submitCommandBuffer(commandBuffer));
    ANGLE_TRY(finish());

    return vk::NoError();
}
615
// Ends recording on |commandBuffer| and submits it waiting on |waitSemaphore| and
// signaling |signalSemaphore| - used for swapchain acquire/present synchronization.
// Submission goes through submitFrame(), which also attaches a fence and recycles
// completed resources.
vk::Error RendererVk::submitCommandsWithSync(vk::CommandBuffer *commandBuffer,
                                             const vk::Semaphore &waitSemaphore,
                                             const vk::Semaphore &signalSemaphore)
{
    ANGLE_TRY(commandBuffer->end());

    // Wait at the last pipeline stage - presumably conservative until finer-grained
    // dependency tracking exists; confirm against the swapchain usage.
    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext                = nullptr;
    submitInfo.waitSemaphoreCount   = 1;
    submitInfo.pWaitSemaphores      = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask    = &waitStageMask;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBuffer->ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = signalSemaphore.ptr();

    // TODO(jmadill): Investigate how to properly queue command buffer work.
    ANGLE_TRY(submitFrame(submitInfo));

    return vk::NoError();
}
640
// Blocks until the GPU queue is completely idle, then frees every in-flight fence,
// command buffer, and garbage object. Also used by the destructor for shutdown.
vk::Error RendererVk::finish()
{
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
648
// Unconditionally destroys all tracked fences, in-flight command buffers, and garbage.
// Only safe to call when the queue is idle (see finish()).
void RendererVk::freeAllInFlightResources()
{
    for (auto &fence : mInFlightFences)
    {
        fence.destroy(mDevice);
    }
    mInFlightFences.clear();

    for (auto &command : mInFlightCommands)
    {
        command.destroy(mDevice);
    }
    mInFlightCommands.clear();

    // Garbage objects are destroyed last, after the command buffers that referenced them.
    for (auto &garbage : mGarbage)
    {
        garbage->destroy(mDevice);
    }
    mGarbage.clear();
}
669
// Polls in-flight fences (oldest first) and retires everything whose work the GPU has
// finished: signaled fences are destroyed, command buffers with serials at or before
// the newest finished serial are destroyed, and garbage objects are freed up to the
// first one that is still pending.
vk::Error RendererVk::checkInFlightCommands()
{
    // One past the index of the last signaled fence; 0 means nothing finished.
    size_t finishedIndex = 0;

    // Check if any in-flight command buffers are finished.
    for (size_t index = 0; index < mInFlightFences.size(); index++)
    {
        auto *inFlightFence = &mInFlightFences[index];

        // Fences were enqueued in submission order, so stop at the first unsignaled one.
        VkResult result = inFlightFence->get().getStatus(mDevice);
        if (result == VK_NOT_READY)
            break;
        ANGLE_VK_TRY(result);
        finishedIndex = index + 1;

        // Release the fence handle.
        // TODO(jmadill): Re-use fences.
        inFlightFence->destroy(mDevice);
    }

    if (finishedIndex == 0)
        return vk::NoError();

    // The newest serial known to be complete; everything at or before it is retired.
    Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
    mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);

    // Destroy command buffers whose serial is covered by the finished serial.
    size_t completedCBIndex = 0;
    for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
    {
        auto *inFlightCB = &mInFlightCommands[cbIndex];
        if (inFlightCB->queueSerial() > finishedSerial)
            break;

        completedCBIndex = cbIndex + 1;
        inFlightCB->destroy(mDevice);
    }

    if (completedCBIndex == 0)
        return vk::NoError();

    mInFlightCommands.erase(mInFlightCommands.begin(),
                            mInFlightCommands.begin() + completedCBIndex);

    // Free garbage in order, stopping at the first object the GPU may still be using.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex]->destroyIfComplete(mDevice, finishedSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
728
// Submits |submitInfo| with no fence and moves the member command buffer onto the
// in-flight list tagged with the current serial, then advances the serial.
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    // NOTE(review): this always moves mCommandBuffer, independent of what submitInfo
    // actually references - presumably callers only ever submit mCommandBuffer; confirm.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
745
// Submits |submitInfo| with a freshly created fence so completion can be polled later,
// records both the fence and the member command buffer against the current serial,
// advances the serial, and opportunistically retires any already-finished work.
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
{
    VkFenceCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;

    vk::Fence fence;
    ANGLE_TRY(fence.init(mDevice, createInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));

    // Track the fence and this command buffer in the in-flight lists under the same serial.
    mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    // Garbage-collect anything the GPU has already finished with.
    ANGLE_TRY(checkInFlightCommands());

    return vk::NoError();
}
773
// Creates a host-visible staging image of the given dimension/format/extent in
// |imageOut|. Requires that initialize() already found a host-visible memory type.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingImage *imageOut)
{
    ASSERT(mHostVisibleMemoryIndex != std::numeric_limits<uint32_t>::max());

    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mHostVisibleMemoryIndex, dimension,
                             format.native, extent));

    return vk::NoError();
}
786
// Returns the shared GlslangWrapper acquired in initialize(); may be null before then.
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
791
// Returns the serial that will be assigned to the next submitted work.
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
796
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400797} // namespace rx