blob: 749733b7e3d2bfb8f6a174b8d80caf9daa51469d [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
44 for (const auto &extensionName : enabledExtensionNames)
45 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
Jamie Madill0448ec82016-12-23 13:41:47 -050055VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
56 VkDebugReportObjectTypeEXT objectType,
57 uint64_t object,
58 size_t location,
59 int32_t messageCode,
60 const char *layerPrefix,
61 const char *message,
62 void *userData)
63{
64 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
65 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050066 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050067#if !defined(NDEBUG)
68 // Abort the call in Debug builds.
69 return VK_TRUE;
70#endif
71 }
72 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
73 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050074 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050075 }
76 else
77 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050078 // Uncomment this if you want Vulkan spam.
79 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050080 }
81
82 return VK_FALSE;
83}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
// Default-constructs the renderer with all Vulkan handles null and the queue
// family index invalid; real setup happens in initialize().
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      // Pre-generate two serials so "last completed" < "current" from the start.
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
102
RendererVk::~RendererVk()
{
    // If any work is still in flight (or garbage is pending destruction),
    // drain the queue first so those resources can be freed safely.
    if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
    {
        vk::Error error = finish();
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    // Teardown order matters: command buffer before its pool, pool before the
    // device, device before the instance.
    if (mCommandBuffer.valid())
    {
        mCommandBuffer.destroy(mDevice);
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // The debug report callback is destroyed through the extension entry
    // point, which must be looked up dynamically from the instance.
    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device handle is owned by the instance; just clear it.
    mPhysicalDevice = VK_NULL_HANDLE;
}
153
Frank Henigman29f148b2016-11-23 21:05:36 -0500154vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500155{
Jamie Madill222c5172017-07-19 16:15:42 -0400156 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500157
158 // If we're loading the validation layers, we could be running from any random directory.
159 // Change to the executable directory so we can find the layers, then change back to the
160 // previous directory to be safe we don't disrupt the application.
161 std::string previousCWD;
162
163 if (mEnableValidationLayers)
164 {
165 const auto &cwd = angle::GetCWD();
166 if (!cwd.valid())
167 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500168 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500169 mEnableValidationLayers = false;
170 }
171 else
172 {
173 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400174 const char *exeDir = angle::GetExecutableDirectory();
175 if (!angle::SetCWD(exeDir))
176 {
177 ERR() << "Error setting CWD for Vulkan layers init.";
178 mEnableValidationLayers = false;
179 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500180 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400181 }
182
183 // Override environment variable to use the ANGLE layers.
184 if (mEnableValidationLayers)
185 {
186 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
187 {
188 ERR() << "Error setting environment for Vulkan layers init.";
189 mEnableValidationLayers = false;
190 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500191 }
192
Jamie Madill0448ec82016-12-23 13:41:47 -0500193 // Gather global layer properties.
194 uint32_t instanceLayerCount = 0;
195 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
196
197 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
198 if (instanceLayerCount > 0)
199 {
200 ANGLE_VK_TRY(
201 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
202 }
203
Jamie Madille09bd5d2016-11-29 16:20:35 -0500204 uint32_t instanceExtensionCount = 0;
205 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
206
207 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
208 if (instanceExtensionCount > 0)
209 {
210 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
211 instanceExtensionProps.data()));
212 }
213
Jamie Madill0448ec82016-12-23 13:41:47 -0500214 if (mEnableValidationLayers)
215 {
216 // Verify the standard validation layers are available.
217 if (!HasStandardValidationLayer(instanceLayerProps))
218 {
219 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400220 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
221 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500222 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500223 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500224 }
225 else
226 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500227 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500228 }
229 mEnableValidationLayers = false;
230 }
231 }
232
Jamie Madille09bd5d2016-11-29 16:20:35 -0500233 std::vector<const char *> enabledInstanceExtensions;
234 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500235 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500236
Jamie Madill0448ec82016-12-23 13:41:47 -0500237 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
238 if (mEnableValidationLayers)
239 {
240 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
241 }
242
Jamie Madille09bd5d2016-11-29 16:20:35 -0500243 // Verify the required extensions are in the extension names set. Fail if not.
244 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
245
Jamie Madill327ba852016-11-30 12:38:28 -0500246 VkApplicationInfo applicationInfo;
247 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
248 applicationInfo.pNext = nullptr;
249 applicationInfo.pApplicationName = "ANGLE";
250 applicationInfo.applicationVersion = 1;
251 applicationInfo.pEngineName = "ANGLE";
252 applicationInfo.engineVersion = 1;
253 applicationInfo.apiVersion = VK_API_VERSION_1_0;
254
255 VkInstanceCreateInfo instanceInfo;
256 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
257 instanceInfo.pNext = nullptr;
258 instanceInfo.flags = 0;
259 instanceInfo.pApplicationInfo = &applicationInfo;
260
Jamie Madille09bd5d2016-11-29 16:20:35 -0500261 // Enable requested layers and extensions.
262 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
263 instanceInfo.ppEnabledExtensionNames =
264 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500265 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
266 instanceInfo.ppEnabledLayerNames =
267 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500268
269 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
270
Jamie Madill0448ec82016-12-23 13:41:47 -0500271 if (mEnableValidationLayers)
272 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500273 // Change back to the previous working directory now that we've loaded the instance -
274 // the validation layers should be loaded at this point.
275 angle::SetCWD(previousCWD.c_str());
276
Jamie Madill0448ec82016-12-23 13:41:47 -0500277 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
278
279 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
280 debugReportInfo.pNext = nullptr;
281 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
282 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
283 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
284 debugReportInfo.pfnCallback = &DebugReportCallback;
285 debugReportInfo.pUserData = this;
286
287 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
288 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
289 ASSERT(createDebugReportCallback);
290 ANGLE_VK_TRY(
291 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
292 }
293
Jamie Madill4d0bf552016-12-28 15:45:24 -0500294 uint32_t physicalDeviceCount = 0;
295 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
296 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
297
298 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
299 physicalDeviceCount = 1;
300 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
301
302 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
303
304 // Ensure we can find a graphics queue family.
305 uint32_t queueCount = 0;
306 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
307
308 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
309
310 mQueueFamilyProperties.resize(queueCount);
311 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
312 mQueueFamilyProperties.data());
313
314 size_t graphicsQueueFamilyCount = false;
315 uint32_t firstGraphicsQueueFamily = 0;
316 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
317 {
318 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
319 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
320 {
321 ASSERT(queueInfo.queueCount > 0);
322 graphicsQueueFamilyCount++;
323 if (firstGraphicsQueueFamily == 0)
324 {
325 firstGraphicsQueueFamily = familyIndex;
326 }
327 break;
328 }
329 }
330
331 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
332
333 // If only one queue family, go ahead and initialize the device. If there is more than one
334 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
335 if (graphicsQueueFamilyCount == 1)
336 {
337 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
338 }
339
Jamie Madill035fd6b2017-10-03 15:43:22 -0400340 // Store the physical device memory properties so we can find the right memory pools.
341 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500342
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500343 mGlslangWrapper = GlslangWrapper::GetReference();
344
Jamie Madill327ba852016-11-30 12:38:28 -0500345 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400346}
347
// Creates the logical device on |queueFamilyIndex|, retrieves its first queue,
// and initializes the command pool and command buffer used for submission.
// Called once, either from initialize() (single graphics family) or from
// selectPresentQueueForSurface() (family chosen by present support).
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Enumerate device-level layers and extensions for validation below.
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Disable validation if the device-level standard validation layer is absent.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    // Swapchain support is mandatory for this backend.
    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Request a single queue from the chosen family.
    VkDeviceQueueCreateInfo queueCreateInfo;

    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                = nullptr;
    createInfo.flags                = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueCreateInfo;
    createInfo.enabledLayerCount    = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    // TODO(jmadill): Investigate transient command buffers.
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    mCommandBuffer.setCommandPool(&mCommandPool);

    return vk::NoError();
}
432
433vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
434{
435 // We've already initialized a device, and can't re-create it unless it's never been used.
436 // TODO(jmadill): Handle the re-creation case if necessary.
437 if (mDevice != VK_NULL_HANDLE)
438 {
439 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
440
441 // Check if the current device supports present on this surface.
442 VkBool32 supportsPresent = VK_FALSE;
443 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
444 surface, &supportsPresent));
445
446 return (supportsPresent == VK_TRUE);
447 }
448
449 // Find a graphics and present queue.
450 Optional<uint32_t> newPresentQueue;
451 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
452 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
453 {
454 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
455 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
456 {
457 VkBool32 supportsPresent = VK_FALSE;
458 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
459 &supportsPresent));
460
461 if (supportsPresent == VK_TRUE)
462 {
463 newPresentQueue = queueIndex;
464 break;
465 }
466 }
467 }
468
469 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
470 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
471
472 return newPresentQueue.value();
473}
474
475std::string RendererVk::getVendorString() const
476{
477 switch (mPhysicalDeviceProperties.vendorID)
478 {
479 case VENDOR_ID_AMD:
480 return "Advanced Micro Devices";
481 case VENDOR_ID_NVIDIA:
482 return "NVIDIA";
483 case VENDOR_ID_INTEL:
484 return "Intel";
485 default:
486 {
487 // TODO(jmadill): More vendor IDs.
488 std::stringstream strstr;
489 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
490 return strstr.str();
491 }
492 }
493}
494
Jamie Madille09bd5d2016-11-29 16:20:35 -0500495std::string RendererVk::getRendererDescription() const
496{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500497 std::stringstream strstr;
498
499 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
500
501 strstr << "Vulkan ";
502 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
503 strstr << VK_VERSION_MINOR(apiVersion) << ".";
504 strstr << VK_VERSION_PATCH(apiVersion);
505
506 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
507
508 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500509}
510
// Lazily generates the native caps on first use. Declared const so the const
// query accessors below can call it; the cached cap members are written here
// (presumably declared mutable in the header - confirm there).
void RendererVk::ensureCapsInitialized() const
{
    if (!mCapsInitialized)
    {
        generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
        mCapsInitialized = true;
    }
}
519
// Fills in the native capabilities and extensions with conservative,
// hard-coded placeholder values while the Vulkan backend is bootstrapped.
// Texture caps and limitations are not populated yet.
void RendererVk::generateCaps(gl::Caps *outCaps,
                              gl::TextureCapsMap * /*outTextureCaps*/,
                              gl::Extensions *outExtensions,
                              gl::Limitations * /* outLimitations */) const
{
    // TODO(jmadill): Caps.
    outCaps->maxDrawBuffers              = 1;
    outCaps->maxVertexAttributes         = gl::MAX_VERTEX_ATTRIBS;
    outCaps->maxVertexAttribBindings     = gl::MAX_VERTEX_ATTRIB_BINDINGS;
    outCaps->maxVaryingVectors           = 16;
    outCaps->maxTextureImageUnits        = 1;
    outCaps->maxCombinedTextureImageUnits = 1;
    outCaps->max2DTextureSize            = 1024;
    // Reserve the top GLuint value as the primitive-restart index.
    outCaps->maxElementIndex             = std::numeric_limits<GLuint>::max() - 1;
    outCaps->maxFragmentUniformVectors   = 8;
    outCaps->maxVertexUniformVectors     = 8;
    outCaps->maxColorAttachments         = 1;

    // Enable this for simple buffer readback testing, but some functionality is missing.
    // TODO(jmadill): Support full mapBufferRange extension.
    outExtensions->mapBuffer      = true;
    outExtensions->mapBufferRange = true;
}
543
// Returns the cached native caps, generating them on first call.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
549
// Returns the cached native texture caps, generating them on first call.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
555
// Returns the cached native extensions, generating them on first call.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
561
// Returns the cached native limitations, generating them on first call.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
567
// Begins recording on the renderer's shared command buffer and returns a
// pointer to it. The renderer retains ownership of the buffer.
vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(mCommandBuffer.begin(mDevice));
    *commandBufferOut = &mCommandBuffer;
    return vk::NoError();
}
574
Jamie Madill0c0dc342017-03-24 14:18:51 -0400575vk::Error RendererVk::submitCommandBuffer(vk::CommandBuffer *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500576{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400577 ANGLE_TRY(commandBuffer->end());
578
Jamie Madill4d0bf552016-12-28 15:45:24 -0500579 VkFenceCreateInfo fenceInfo;
580 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
581 fenceInfo.pNext = nullptr;
582 fenceInfo.flags = 0;
583
Jamie Madill4d0bf552016-12-28 15:45:24 -0500584 VkSubmitInfo submitInfo;
585 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
586 submitInfo.pNext = nullptr;
587 submitInfo.waitSemaphoreCount = 0;
588 submitInfo.pWaitSemaphores = nullptr;
589 submitInfo.pWaitDstStageMask = nullptr;
590 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400591 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500592 submitInfo.signalSemaphoreCount = 0;
593 submitInfo.pSignalSemaphores = nullptr;
594
595 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500596 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500597
Jamie Madillf651c772017-02-21 15:03:51 -0500598 return vk::NoError();
599}
600
// Submits |commandBuffer| and then blocks until the queue is idle. Used where
// the caller needs the GPU work completed before continuing.
vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(submitCommandBuffer(commandBuffer));
    ANGLE_TRY(finish());

    return vk::NoError();
}
608
// Ends |commandBuffer| and submits it, waiting on |waitSemaphore| (at the
// bottom-of-pipe stage) and signaling |signalSemaphore| when the work
// completes. Submission goes through submitFrame(), which attaches a fence
// and recycles finished resources.
vk::Error RendererVk::submitCommandsWithSync(vk::CommandBuffer *commandBuffer,
                                             const vk::Semaphore &waitSemaphore,
                                             const vk::Semaphore &signalSemaphore)
{
    ANGLE_TRY(commandBuffer->end());

    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext                = nullptr;
    submitInfo.waitSemaphoreCount   = 1;
    submitInfo.pWaitSemaphores      = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask    = &waitStageMask;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBuffer->ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = signalSemaphore.ptr();

    // TODO(jmadill): Investigate how to properly queue command buffer work.
    ANGLE_TRY(submitFrame(submitInfo));

    return vk::NoError();
}
633
// Blocks until the graphics queue is idle, then frees every in-flight fence,
// command buffer, and garbage object.
vk::Error RendererVk::finish()
{
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
641
Jamie Madill0c0dc342017-03-24 14:18:51 -0400642void RendererVk::freeAllInFlightResources()
643{
644 for (auto &fence : mInFlightFences)
645 {
646 fence.destroy(mDevice);
647 }
648 mInFlightFences.clear();
649
650 for (auto &command : mInFlightCommands)
651 {
652 command.destroy(mDevice);
653 }
654 mInFlightCommands.clear();
655
656 for (auto &garbage : mGarbage)
657 {
658 garbage->destroy(mDevice);
659 }
660 mGarbage.clear();
661}
662
// Polls in-flight fences and retires everything the GPU has finished with:
// signaled fences are destroyed, command buffers with serials at or below the
// newest finished serial are destroyed, and garbage objects up to that serial
// are freed. Fences/commands are assumed to be in submission (serial) order.
vk::Error RendererVk::checkInFlightCommands()
{
    size_t finishedIndex = 0;

    // Check if any in-flight command buffers are finished.
    for (size_t index = 0; index < mInFlightFences.size(); index++)
    {
        auto *inFlightFence = &mInFlightFences[index];

        VkResult result = inFlightFence->get().getStatus(mDevice);
        // Stop at the first unsignaled fence - later ones can't be done either.
        if (result == VK_NOT_READY)
            break;
        ANGLE_VK_TRY(result);
        finishedIndex = index + 1;

        // Release the fence handle.
        // TODO(jmadill): Re-use fences.
        inFlightFence->destroy(mDevice);
    }

    // Nothing finished; nothing to retire.
    if (finishedIndex == 0)
        return vk::NoError();

    // The serial of the last finished fence bounds what work is complete.
    Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
    mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);

    // Destroy command buffers whose serial is covered by the finished fence.
    size_t completedCBIndex = 0;
    for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
    {
        auto *inFlightCB = &mInFlightCommands[cbIndex];
        if (inFlightCB->queueSerial() > finishedSerial)
            break;

        completedCBIndex = cbIndex + 1;
        inFlightCB->destroy(mDevice);
    }

    if (completedCBIndex == 0)
        return vk::NoError();

    mInFlightCommands.erase(mInFlightCommands.begin(),
                            mInFlightCommands.begin() + completedCBIndex);

    // Free garbage objects up to the first one the GPU may still be using.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex]->destroyIfComplete(mDevice, finishedSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
721
// Submits |submitInfo| to the queue without a fence, moves the current command
// buffer into the in-flight list under the current serial, and advances the
// queue serial.
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
738
// Like submit(), but attaches a freshly created fence to the submission so
// completion can be detected later, then opportunistically retires finished
// work via checkInFlightCommands().
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
{
    VkFenceCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;

    vk::Fence fence;
    ANGLE_TRY(fence.init(mDevice, createInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));

    // Store this command buffer in the in-flight list.
    mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    ANGLE_TRY(checkInFlightCommands());

    return vk::NoError();
}
766
// Creates a staging image for texture upload/readback, allocated from the
// cached physical-device memory properties on the current queue family.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.native, extent, usage));
    return vk::NoError();
}
777
// Returns the shared GlslangWrapper reference acquired in initialize().
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
782
// Returns the serial that will be assigned to the next queue submission.
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
787
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400788} // namespace rx