blob: 9580e3514d45f91dac244070c1cb43d9aa827055 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
44 for (const auto &extensionName : enabledExtensionNames)
45 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
Jamie Madill0448ec82016-12-23 13:41:47 -050055VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
56 VkDebugReportObjectTypeEXT objectType,
57 uint64_t object,
58 size_t location,
59 int32_t messageCode,
60 const char *layerPrefix,
61 const char *message,
62 void *userData)
63{
64 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
65 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050066 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050067#if !defined(NDEBUG)
68 // Abort the call in Debug builds.
69 return VK_TRUE;
70#endif
71 }
72 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
73 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050074 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050075 }
76 else
77 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050078 // Uncomment this if you want Vulkan spam.
79 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050080 }
81
82 return VK_FALSE;
83}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
// Default-constructs the renderer with all Vulkan handles null. Real setup
// (instance, device, queue, command pool) happens later in initialize().
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      // max() acts as the "no queue family selected yet" sentinel; checked in
      // selectPresentQueueForSurface().
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      // Two serials are pre-generated so "last completed" < "current" from the start.
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
102
// Tears down all Vulkan state. Order matters: pending GPU work is drained first,
// then child objects (command buffer/pool) are destroyed before the device, and
// the device before the instance that owns it.
RendererVk::~RendererVk()
{
    // Drain the queue and free in-flight resources if any work is still tracked.
    if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
    {
        vk::Error error = finish();
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    // Drop our reference on the shared GlslangWrapper singleton.
    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    // Command buffer must be freed before its pool.
    if (mCommandBuffer.valid())
    {
        mCommandBuffer.destroy(mDevice);
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // The debug callback is an instance-level extension object; its destroy
    // entry point must be fetched dynamically, same as its create entry point.
    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device handle is owned by the instance; just clear it.
    mPhysicalDevice = VK_NULL_HANDLE;
}
153
Frank Henigman29f148b2016-11-23 21:05:36 -0500154vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500155{
Jamie Madill222c5172017-07-19 16:15:42 -0400156 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500157
158 // If we're loading the validation layers, we could be running from any random directory.
159 // Change to the executable directory so we can find the layers, then change back to the
160 // previous directory to be safe we don't disrupt the application.
161 std::string previousCWD;
162
163 if (mEnableValidationLayers)
164 {
165 const auto &cwd = angle::GetCWD();
166 if (!cwd.valid())
167 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500168 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500169 mEnableValidationLayers = false;
170 }
171 else
172 {
173 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400174 const char *exeDir = angle::GetExecutableDirectory();
175 if (!angle::SetCWD(exeDir))
176 {
177 ERR() << "Error setting CWD for Vulkan layers init.";
178 mEnableValidationLayers = false;
179 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500180 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400181 }
182
183 // Override environment variable to use the ANGLE layers.
184 if (mEnableValidationLayers)
185 {
186 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
187 {
188 ERR() << "Error setting environment for Vulkan layers init.";
189 mEnableValidationLayers = false;
190 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500191 }
192
Jamie Madill0448ec82016-12-23 13:41:47 -0500193 // Gather global layer properties.
194 uint32_t instanceLayerCount = 0;
195 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
196
197 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
198 if (instanceLayerCount > 0)
199 {
200 ANGLE_VK_TRY(
201 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
202 }
203
Jamie Madille09bd5d2016-11-29 16:20:35 -0500204 uint32_t instanceExtensionCount = 0;
205 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
206
207 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
208 if (instanceExtensionCount > 0)
209 {
210 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
211 instanceExtensionProps.data()));
212 }
213
Jamie Madill0448ec82016-12-23 13:41:47 -0500214 if (mEnableValidationLayers)
215 {
216 // Verify the standard validation layers are available.
217 if (!HasStandardValidationLayer(instanceLayerProps))
218 {
219 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400220 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
221 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500222 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500223 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500224 }
225 else
226 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500227 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500228 }
229 mEnableValidationLayers = false;
230 }
231 }
232
Jamie Madille09bd5d2016-11-29 16:20:35 -0500233 std::vector<const char *> enabledInstanceExtensions;
234 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500235 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500236
Jamie Madill0448ec82016-12-23 13:41:47 -0500237 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
238 if (mEnableValidationLayers)
239 {
240 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
241 }
242
Jamie Madille09bd5d2016-11-29 16:20:35 -0500243 // Verify the required extensions are in the extension names set. Fail if not.
244 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
245
Jamie Madill327ba852016-11-30 12:38:28 -0500246 VkApplicationInfo applicationInfo;
247 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
248 applicationInfo.pNext = nullptr;
249 applicationInfo.pApplicationName = "ANGLE";
250 applicationInfo.applicationVersion = 1;
251 applicationInfo.pEngineName = "ANGLE";
252 applicationInfo.engineVersion = 1;
253 applicationInfo.apiVersion = VK_API_VERSION_1_0;
254
255 VkInstanceCreateInfo instanceInfo;
256 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
257 instanceInfo.pNext = nullptr;
258 instanceInfo.flags = 0;
259 instanceInfo.pApplicationInfo = &applicationInfo;
260
Jamie Madille09bd5d2016-11-29 16:20:35 -0500261 // Enable requested layers and extensions.
262 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
263 instanceInfo.ppEnabledExtensionNames =
264 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500265 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
266 instanceInfo.ppEnabledLayerNames =
267 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500268
269 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
270
Jamie Madill0448ec82016-12-23 13:41:47 -0500271 if (mEnableValidationLayers)
272 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500273 // Change back to the previous working directory now that we've loaded the instance -
274 // the validation layers should be loaded at this point.
275 angle::SetCWD(previousCWD.c_str());
276
Jamie Madill0448ec82016-12-23 13:41:47 -0500277 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
278
279 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
280 debugReportInfo.pNext = nullptr;
281 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
282 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
283 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
284 debugReportInfo.pfnCallback = &DebugReportCallback;
285 debugReportInfo.pUserData = this;
286
287 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
288 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
289 ASSERT(createDebugReportCallback);
290 ANGLE_VK_TRY(
291 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
292 }
293
Jamie Madill4d0bf552016-12-28 15:45:24 -0500294 uint32_t physicalDeviceCount = 0;
295 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
296 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
297
298 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
299 physicalDeviceCount = 1;
300 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
301
302 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
303
304 // Ensure we can find a graphics queue family.
305 uint32_t queueCount = 0;
306 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
307
308 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
309
310 mQueueFamilyProperties.resize(queueCount);
311 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
312 mQueueFamilyProperties.data());
313
314 size_t graphicsQueueFamilyCount = false;
315 uint32_t firstGraphicsQueueFamily = 0;
316 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
317 {
318 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
319 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
320 {
321 ASSERT(queueInfo.queueCount > 0);
322 graphicsQueueFamilyCount++;
323 if (firstGraphicsQueueFamily == 0)
324 {
325 firstGraphicsQueueFamily = familyIndex;
326 }
327 break;
328 }
329 }
330
331 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
332
333 // If only one queue family, go ahead and initialize the device. If there is more than one
334 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
335 if (graphicsQueueFamilyCount == 1)
336 {
337 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
338 }
339
Jamie Madill035fd6b2017-10-03 15:43:22 -0400340 // Store the physical device memory properties so we can find the right memory pools.
341 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500342
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500343 mGlslangWrapper = GlslangWrapper::GetReference();
344
Jamie Madill327ba852016-11-30 12:38:28 -0500345 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400346}
347
// Creates the logical device, retrieves its single graphics queue, and creates the
// command pool, all against |queueFamilyIndex|. Called from initialize() when the
// graphics queue family is unambiguous, or lazily from selectPresentQueueForSurface().
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Enumerate device-level layers (legacy; modern loaders ignore these, but we
    // still use them to decide whether validation can stay enabled).
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Validation may have been enabled at instance level; disable it here if the
    // device-level layer is unavailable.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    // Swapchain support is mandatory for presenting to a surface.
    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    VkDeviceQueueCreateInfo queueCreateInfo;

    // Single queue at the lowest priority; priorities only matter with multiple queues.
    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                 = nullptr;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    // Queue index 0 — we requested exactly one queue above.
    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    // TODO(jmadill): Investigate transient command buffers.
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    mCommandBuffer.setCommandPool(&mCommandPool);

    return vk::NoError();
}
432
433vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
434{
435 // We've already initialized a device, and can't re-create it unless it's never been used.
436 // TODO(jmadill): Handle the re-creation case if necessary.
437 if (mDevice != VK_NULL_HANDLE)
438 {
439 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
440
441 // Check if the current device supports present on this surface.
442 VkBool32 supportsPresent = VK_FALSE;
443 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
444 surface, &supportsPresent));
445
446 return (supportsPresent == VK_TRUE);
447 }
448
449 // Find a graphics and present queue.
450 Optional<uint32_t> newPresentQueue;
451 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
452 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
453 {
454 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
455 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
456 {
457 VkBool32 supportsPresent = VK_FALSE;
458 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
459 &supportsPresent));
460
461 if (supportsPresent == VK_TRUE)
462 {
463 newPresentQueue = queueIndex;
464 break;
465 }
466 }
467 }
468
469 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
470 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
471
472 return newPresentQueue.value();
473}
474
475std::string RendererVk::getVendorString() const
476{
477 switch (mPhysicalDeviceProperties.vendorID)
478 {
479 case VENDOR_ID_AMD:
480 return "Advanced Micro Devices";
481 case VENDOR_ID_NVIDIA:
482 return "NVIDIA";
483 case VENDOR_ID_INTEL:
484 return "Intel";
485 default:
486 {
487 // TODO(jmadill): More vendor IDs.
488 std::stringstream strstr;
489 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
490 return strstr.str();
491 }
492 }
493}
494
Jamie Madille09bd5d2016-11-29 16:20:35 -0500495std::string RendererVk::getRendererDescription() const
496{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500497 std::stringstream strstr;
498
499 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
500
501 strstr << "Vulkan ";
502 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
503 strstr << VK_VERSION_MINOR(apiVersion) << ".";
504 strstr << VK_VERSION_PATCH(apiVersion);
505
506 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
507
508 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500509}
510
Jamie Madillacccc6c2016-05-03 17:22:10 -0400511void RendererVk::ensureCapsInitialized() const
512{
513 if (!mCapsInitialized)
514 {
515 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
516 mCapsInitialized = true;
517 }
518}
519
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500520void RendererVk::generateCaps(gl::Caps *outCaps,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400521 gl::TextureCapsMap * /*outTextureCaps*/,
Jamie Madillb8353b02017-01-25 12:57:21 -0800522 gl::Extensions *outExtensions,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400523 gl::Limitations * /* outLimitations */) const
524{
Jamie Madill327ba852016-11-30 12:38:28 -0500525 // TODO(jmadill): Caps.
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500526 outCaps->maxDrawBuffers = 1;
Jiawei-Shao2597fb62016-12-09 16:38:02 +0800527 outCaps->maxVertexAttributes = gl::MAX_VERTEX_ATTRIBS;
528 outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
Jamie Madill035fd6b2017-10-03 15:43:22 -0400529 outCaps->maxVaryingVectors = 16;
530 outCaps->maxTextureImageUnits = 1;
531 outCaps->maxCombinedTextureImageUnits = 1;
532 outCaps->max2DTextureSize = 1024;
Jamie Madilld03a8492017-10-03 15:46:06 -0400533 outCaps->maxElementIndex = std::numeric_limits<GLuint>::max() - 1;
Jamie Madill6276b922017-09-25 02:35:57 -0400534 outCaps->maxFragmentUniformVectors = 8;
535 outCaps->maxVertexUniformVectors = 8;
Jamie Madillb8353b02017-01-25 12:57:21 -0800536
537 // Enable this for simple buffer readback testing, but some functionality is missing.
538 // TODO(jmadill): Support full mapBufferRange extension.
539 outExtensions->mapBuffer = true;
540 outExtensions->mapBufferRange = true;
Jamie Madillacccc6c2016-05-03 17:22:10 -0400541}
542
// Returns the lazily-initialized native GL caps.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
548
// Returns the lazily-initialized native texture caps map.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
554
// Returns the lazily-initialized native GL extensions.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
560
// Returns the lazily-initialized native GL limitations.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
566
// Begins recording on the renderer's single command buffer and hands the caller a
// pointer to it. The renderer retains ownership of the buffer.
vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(mCommandBuffer.begin(mDevice));
    *commandBufferOut = &mCommandBuffer;
    return vk::NoError();
}
573
Jamie Madill0c0dc342017-03-24 14:18:51 -0400574vk::Error RendererVk::submitCommandBuffer(vk::CommandBuffer *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500575{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400576 ANGLE_TRY(commandBuffer->end());
577
Jamie Madill4d0bf552016-12-28 15:45:24 -0500578 VkFenceCreateInfo fenceInfo;
579 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
580 fenceInfo.pNext = nullptr;
581 fenceInfo.flags = 0;
582
Jamie Madill4d0bf552016-12-28 15:45:24 -0500583 VkSubmitInfo submitInfo;
584 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
585 submitInfo.pNext = nullptr;
586 submitInfo.waitSemaphoreCount = 0;
587 submitInfo.pWaitSemaphores = nullptr;
588 submitInfo.pWaitDstStageMask = nullptr;
589 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400590 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500591 submitInfo.signalSemaphoreCount = 0;
592 submitInfo.pSignalSemaphores = nullptr;
593
594 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500595 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500596
Jamie Madillf651c772017-02-21 15:03:51 -0500597 return vk::NoError();
598}
599
// Submits |commandBuffer| and then blocks until the queue is idle — a synchronous
// submit used where the caller needs the GPU work completed before continuing.
vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(submitCommandBuffer(commandBuffer));
    ANGLE_TRY(finish());

    return vk::NoError();
}
607
// Ends recording on |commandBuffer| and submits it, waiting on |waitSemaphore|
// (e.g. swapchain image acquisition) and signaling |signalSemaphore| (e.g. for
// present) — the per-frame submit path.
vk::Error RendererVk::submitCommandsWithSync(vk::CommandBuffer *commandBuffer,
                                             const vk::Semaphore &waitSemaphore,
                                             const vk::Semaphore &signalSemaphore)
{
    ANGLE_TRY(commandBuffer->end());

    // Wait as late as possible in the pipeline before honoring the semaphore.
    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext                = nullptr;
    submitInfo.waitSemaphoreCount   = 1;
    submitInfo.pWaitSemaphores      = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask    = &waitStageMask;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBuffer->ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = signalSemaphore.ptr();

    // TODO(jmadill): Investigate how to properly queue command buffer work.
    ANGLE_TRY(submitFrame(submitInfo));

    return vk::NoError();
}
632
// Blocks until the graphics queue is idle, then frees everything tracked as
// in-flight (fences, command buffers, garbage).
vk::Error RendererVk::finish()
{
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
640
Jamie Madill0c0dc342017-03-24 14:18:51 -0400641void RendererVk::freeAllInFlightResources()
642{
643 for (auto &fence : mInFlightFences)
644 {
645 fence.destroy(mDevice);
646 }
647 mInFlightFences.clear();
648
649 for (auto &command : mInFlightCommands)
650 {
651 command.destroy(mDevice);
652 }
653 mInFlightCommands.clear();
654
655 for (auto &garbage : mGarbage)
656 {
657 garbage->destroy(mDevice);
658 }
659 mGarbage.clear();
660}
661
// Polls in-flight fences (oldest first) and retires everything the GPU has
// finished: signaled fences are destroyed, command buffers whose serial is at or
// below the newest finished serial are destroyed, and garbage objects are given a
// chance to free themselves against that serial.
vk::Error RendererVk::checkInFlightCommands()
{
    // One past the index of the newest signaled fence (0 == none finished).
    size_t finishedIndex = 0;

    // Check if any in-flight command buffers are finished.
    for (size_t index = 0; index < mInFlightFences.size(); index++)
    {
        auto *inFlightFence = &mInFlightFences[index];

        VkResult result = inFlightFence->get().getStatus(mDevice);
        // Fences are in submission order, so stop at the first unsignaled one.
        if (result == VK_NOT_READY)
            break;
        ANGLE_VK_TRY(result);
        finishedIndex = index + 1;

        // Release the fence handle.
        // TODO(jmadill): Re-use fences.
        inFlightFence->destroy(mDevice);
    }

    if (finishedIndex == 0)
        return vk::NoError();

    // The serial of the newest finished fence bounds what is safe to retire.
    Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
    mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);

    // Destroy command buffers whose serial is covered by the finished serial.
    size_t completedCBIndex = 0;
    for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
    {
        auto *inFlightCB = &mInFlightCommands[cbIndex];
        if (inFlightCB->queueSerial() > finishedSerial)
            break;

        completedCBIndex = cbIndex + 1;
        inFlightCB->destroy(mDevice);
    }

    if (completedCBIndex == 0)
        return vk::NoError();

    mInFlightCommands.erase(mInFlightCommands.begin(),
                            mInFlightCommands.begin() + completedCBIndex);

    // Garbage is ordered by serial too; destroyIfComplete() frees the object only
    // when its serial is covered, so stop at the first object that declines.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex]->destroyIfComplete(mDevice, finishedSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
720
// Submits |submitInfo| to the graphics queue without a fence, moves the renderer's
// command buffer into the in-flight list tagged with the current serial, and
// advances the serial.
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
737
// Per-frame submit: like submit(), but also creates a fence for the submission so
// checkInFlightCommands() can later detect completion, and runs that cleanup pass
// at the end of the frame.
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
{
    VkFenceCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;

    // Fence is created unsignaled and signaled by the GPU when this submit completes.
    vk::Fence fence;
    ANGLE_TRY(fence.init(mDevice, createInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));

    // Store this command buffer in the in-flight list.
    mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    // Retire any previously submitted work that has since completed.
    ANGLE_TRY(checkInFlightCommands());

    return vk::NoError();
}
765
// Creates a staging image for texture upload/readback, backed by memory chosen
// from the cached physical-device memory properties.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.native, extent, usage));
    return vk::NoError();
}
776
// Returns the shared GlslangWrapper acquired in initialize(); ownership stays
// with the renderer (released in the destructor).
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
781
// Returns the serial that will tag the next submission to the graphics queue.
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
786
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400787} // namespace rx