blob: f98f24216e4fea759bda5e3224fc9f29fb7148f6 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
Jamie Madillacf2f3a2017-11-21 19:22:44 -050044 for (const char *extensionName : enabledExtensionNames)
Jamie Madille09bd5d2016-11-29 16:20:35 -050045 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
Jamie Madill0448ec82016-12-23 13:41:47 -050055VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
56 VkDebugReportObjectTypeEXT objectType,
57 uint64_t object,
58 size_t location,
59 int32_t messageCode,
60 const char *layerPrefix,
61 const char *message,
62 void *userData)
63{
64 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
65 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050066 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050067#if !defined(NDEBUG)
68 // Abort the call in Debug builds.
69 return VK_TRUE;
70#endif
71 }
72 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
73 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050074 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050075 }
76 else
77 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050078 // Uncomment this if you want Vulkan spam.
79 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050080 }
81
82 return VK_FALSE;
83}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
// Default-constructs the renderer with null Vulkan handles and an invalid queue family
// index. Two serials are generated up front for mLastCompletedQueueSerial and
// mCurrentQueueSerial (presumably so the current serial orders after the completed one —
// confirm against the SerialFactory implementation).
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands(),
      mCurrentRenderPassFramebuffer(nullptr)
{
}
103
// Tears down all Vulkan objects. Order matters: drain the GPU and free in-flight
// resources first, then destroy pool-owned objects, the device, the debug callback,
// and finally the instance.
RendererVk::~RendererVk()
{
    // If any work is still tracked, wait for the queue to go idle and release it.
    if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
    {
        vk::Error error = finish();
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    // The command buffer must be freed before its pool is destroyed.
    if (mCommandBuffer.valid())
    {
        mCommandBuffer.destroy(mDevice, mCommandPool);
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // The destroy function is an extension entry point, so it must be fetched
    // from the instance before use.
    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device handle is owned by the instance; just clear it.
    mPhysicalDevice = VK_NULL_HANDLE;
}
154
Frank Henigman29f148b2016-11-23 21:05:36 -0500155vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500156{
Jamie Madill222c5172017-07-19 16:15:42 -0400157 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500158
159 // If we're loading the validation layers, we could be running from any random directory.
160 // Change to the executable directory so we can find the layers, then change back to the
161 // previous directory to be safe we don't disrupt the application.
162 std::string previousCWD;
163
164 if (mEnableValidationLayers)
165 {
166 const auto &cwd = angle::GetCWD();
167 if (!cwd.valid())
168 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500169 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500170 mEnableValidationLayers = false;
171 }
172 else
173 {
174 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400175 const char *exeDir = angle::GetExecutableDirectory();
176 if (!angle::SetCWD(exeDir))
177 {
178 ERR() << "Error setting CWD for Vulkan layers init.";
179 mEnableValidationLayers = false;
180 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500181 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400182 }
183
184 // Override environment variable to use the ANGLE layers.
185 if (mEnableValidationLayers)
186 {
187 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
188 {
189 ERR() << "Error setting environment for Vulkan layers init.";
190 mEnableValidationLayers = false;
191 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500192 }
193
Jamie Madill0448ec82016-12-23 13:41:47 -0500194 // Gather global layer properties.
195 uint32_t instanceLayerCount = 0;
196 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
197
198 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
199 if (instanceLayerCount > 0)
200 {
201 ANGLE_VK_TRY(
202 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
203 }
204
Jamie Madille09bd5d2016-11-29 16:20:35 -0500205 uint32_t instanceExtensionCount = 0;
206 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
207
208 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
209 if (instanceExtensionCount > 0)
210 {
211 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
212 instanceExtensionProps.data()));
213 }
214
Jamie Madill0448ec82016-12-23 13:41:47 -0500215 if (mEnableValidationLayers)
216 {
217 // Verify the standard validation layers are available.
218 if (!HasStandardValidationLayer(instanceLayerProps))
219 {
220 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400221 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
222 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500223 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500224 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500225 }
226 else
227 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500228 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500229 }
230 mEnableValidationLayers = false;
231 }
232 }
233
Jamie Madille09bd5d2016-11-29 16:20:35 -0500234 std::vector<const char *> enabledInstanceExtensions;
235 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500236 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500237
Jamie Madill0448ec82016-12-23 13:41:47 -0500238 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
239 if (mEnableValidationLayers)
240 {
241 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
242 }
243
Jamie Madille09bd5d2016-11-29 16:20:35 -0500244 // Verify the required extensions are in the extension names set. Fail if not.
245 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
246
Jamie Madill327ba852016-11-30 12:38:28 -0500247 VkApplicationInfo applicationInfo;
248 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
249 applicationInfo.pNext = nullptr;
250 applicationInfo.pApplicationName = "ANGLE";
251 applicationInfo.applicationVersion = 1;
252 applicationInfo.pEngineName = "ANGLE";
253 applicationInfo.engineVersion = 1;
254 applicationInfo.apiVersion = VK_API_VERSION_1_0;
255
256 VkInstanceCreateInfo instanceInfo;
257 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
258 instanceInfo.pNext = nullptr;
259 instanceInfo.flags = 0;
260 instanceInfo.pApplicationInfo = &applicationInfo;
261
Jamie Madille09bd5d2016-11-29 16:20:35 -0500262 // Enable requested layers and extensions.
263 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
264 instanceInfo.ppEnabledExtensionNames =
265 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500266 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
267 instanceInfo.ppEnabledLayerNames =
268 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500269
270 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
271
Jamie Madill0448ec82016-12-23 13:41:47 -0500272 if (mEnableValidationLayers)
273 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500274 // Change back to the previous working directory now that we've loaded the instance -
275 // the validation layers should be loaded at this point.
276 angle::SetCWD(previousCWD.c_str());
277
Jamie Madill0448ec82016-12-23 13:41:47 -0500278 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
279
280 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
281 debugReportInfo.pNext = nullptr;
282 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
283 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
284 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
285 debugReportInfo.pfnCallback = &DebugReportCallback;
286 debugReportInfo.pUserData = this;
287
288 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
289 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
290 ASSERT(createDebugReportCallback);
291 ANGLE_VK_TRY(
292 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
293 }
294
Jamie Madill4d0bf552016-12-28 15:45:24 -0500295 uint32_t physicalDeviceCount = 0;
296 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
297 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
298
299 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
300 physicalDeviceCount = 1;
301 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
302
303 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
304
305 // Ensure we can find a graphics queue family.
306 uint32_t queueCount = 0;
307 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
308
309 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
310
311 mQueueFamilyProperties.resize(queueCount);
312 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
313 mQueueFamilyProperties.data());
314
315 size_t graphicsQueueFamilyCount = false;
316 uint32_t firstGraphicsQueueFamily = 0;
317 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
318 {
319 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
320 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
321 {
322 ASSERT(queueInfo.queueCount > 0);
323 graphicsQueueFamilyCount++;
324 if (firstGraphicsQueueFamily == 0)
325 {
326 firstGraphicsQueueFamily = familyIndex;
327 }
328 break;
329 }
330 }
331
332 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
333
334 // If only one queue family, go ahead and initialize the device. If there is more than one
335 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
336 if (graphicsQueueFamilyCount == 1)
337 {
338 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
339 }
340
Jamie Madill035fd6b2017-10-03 15:43:22 -0400341 // Store the physical device memory properties so we can find the right memory pools.
342 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500343
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500344 mGlslangWrapper = GlslangWrapper::GetReference();
345
Jamie Madill6a89d222017-11-02 11:59:51 -0400346 // Initialize the format table.
347 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);
348
Jamie Madill327ba852016-11-30 12:38:28 -0500349 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400350}
351
// Creates the logical VkDevice and command pool for |queueFamilyIndex|, enabling the
// swapchain extension and (when still enabled and available) the standard validation
// layer. Sets mCurrentQueueFamilyIndex, mDevice, mQueue and mCommandPool on success.
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Gather device-level layer and extension properties so we can validate what we enable.
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Silently drop validation if the device-level standard validation layer is missing.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Request a single queue from the chosen family at priority zero.
    VkDeviceQueueCreateInfo queueCreateInfo;

    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                = nullptr;
    createInfo.flags                = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueCreateInfo;
    createInfo.enabledLayerCount    = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    // TODO(jmadill): Investigate transient command buffers.
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    return vk::NoError();
}
434
435vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
436{
437 // We've already initialized a device, and can't re-create it unless it's never been used.
438 // TODO(jmadill): Handle the re-creation case if necessary.
439 if (mDevice != VK_NULL_HANDLE)
440 {
441 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
442
443 // Check if the current device supports present on this surface.
444 VkBool32 supportsPresent = VK_FALSE;
445 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
446 surface, &supportsPresent));
447
448 return (supportsPresent == VK_TRUE);
449 }
450
451 // Find a graphics and present queue.
452 Optional<uint32_t> newPresentQueue;
453 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
454 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
455 {
456 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
457 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
458 {
459 VkBool32 supportsPresent = VK_FALSE;
460 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
461 &supportsPresent));
462
463 if (supportsPresent == VK_TRUE)
464 {
465 newPresentQueue = queueIndex;
466 break;
467 }
468 }
469 }
470
471 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
472 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
473
474 return newPresentQueue.value();
475}
476
477std::string RendererVk::getVendorString() const
478{
479 switch (mPhysicalDeviceProperties.vendorID)
480 {
481 case VENDOR_ID_AMD:
482 return "Advanced Micro Devices";
483 case VENDOR_ID_NVIDIA:
484 return "NVIDIA";
485 case VENDOR_ID_INTEL:
486 return "Intel";
487 default:
488 {
489 // TODO(jmadill): More vendor IDs.
490 std::stringstream strstr;
491 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
492 return strstr.str();
493 }
494 }
495}
496
Jamie Madille09bd5d2016-11-29 16:20:35 -0500497std::string RendererVk::getRendererDescription() const
498{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500499 std::stringstream strstr;
500
501 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
502
503 strstr << "Vulkan ";
504 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
505 strstr << VK_VERSION_MINOR(apiVersion) << ".";
506 strstr << VK_VERSION_PATCH(apiVersion);
507
508 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
509
510 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500511}
512
Jamie Madillacccc6c2016-05-03 17:22:10 -0400513void RendererVk::ensureCapsInitialized() const
514{
515 if (!mCapsInitialized)
516 {
517 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
518 mCapsInitialized = true;
519 }
520}
521
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500522void RendererVk::generateCaps(gl::Caps *outCaps,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400523 gl::TextureCapsMap * /*outTextureCaps*/,
Jamie Madillb8353b02017-01-25 12:57:21 -0800524 gl::Extensions *outExtensions,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400525 gl::Limitations * /* outLimitations */) const
526{
Jamie Madill327ba852016-11-30 12:38:28 -0500527 // TODO(jmadill): Caps.
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500528 outCaps->maxDrawBuffers = 1;
Jiawei-Shao2597fb62016-12-09 16:38:02 +0800529 outCaps->maxVertexAttributes = gl::MAX_VERTEX_ATTRIBS;
530 outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
Jamie Madill035fd6b2017-10-03 15:43:22 -0400531 outCaps->maxVaryingVectors = 16;
532 outCaps->maxTextureImageUnits = 1;
533 outCaps->maxCombinedTextureImageUnits = 1;
534 outCaps->max2DTextureSize = 1024;
Jamie Madilld03a8492017-10-03 15:46:06 -0400535 outCaps->maxElementIndex = std::numeric_limits<GLuint>::max() - 1;
Jamie Madill6276b922017-09-25 02:35:57 -0400536 outCaps->maxFragmentUniformVectors = 8;
537 outCaps->maxVertexUniformVectors = 8;
Jamie Madillb79e7bb2017-10-24 13:55:50 -0400538 outCaps->maxColorAttachments = 1;
Jamie Madillb8353b02017-01-25 12:57:21 -0800539
540 // Enable this for simple buffer readback testing, but some functionality is missing.
541 // TODO(jmadill): Support full mapBufferRange extension.
542 outExtensions->mapBuffer = true;
543 outExtensions->mapBufferRange = true;
Jamie Madillacccc6c2016-05-03 17:22:10 -0400544}
545
// Accessors for the native capability tables. Each ensures the lazily-built caps are
// generated before returning the cached table.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
569
// Returns the renderer's single command buffer, allocating/beginning it first if it is
// not already in the started (recording) state.
vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBufferAndState **commandBufferOut)
{
    ANGLE_TRY(mCommandBuffer.ensureStarted(mDevice, mCommandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
    *commandBufferOut = &mCommandBuffer;
    return vk::NoError();
}
576
Jamie Madill7f738d42017-11-20 17:06:27 -0500577vk::Error RendererVk::submitCommandBuffer(vk::CommandBufferAndState *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500578{
Jamie Madill7f738d42017-11-20 17:06:27 -0500579 ANGLE_TRY(commandBuffer->ensureFinished());
Jamie Madill0c0dc342017-03-24 14:18:51 -0400580
Jamie Madill4d0bf552016-12-28 15:45:24 -0500581 VkFenceCreateInfo fenceInfo;
582 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
583 fenceInfo.pNext = nullptr;
584 fenceInfo.flags = 0;
585
Jamie Madill4d0bf552016-12-28 15:45:24 -0500586 VkSubmitInfo submitInfo;
587 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
588 submitInfo.pNext = nullptr;
589 submitInfo.waitSemaphoreCount = 0;
590 submitInfo.pWaitSemaphores = nullptr;
591 submitInfo.pWaitDstStageMask = nullptr;
592 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400593 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500594 submitInfo.signalSemaphoreCount = 0;
595 submitInfo.pSignalSemaphores = nullptr;
596
597 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500598 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500599
Jamie Madillf651c772017-02-21 15:03:51 -0500600 return vk::NoError();
601}
602
// Submits |commandBuffer| and then blocks until the queue is idle - a full CPU/GPU sync
// that also frees all in-flight resources (see finish()).
vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBufferAndState *commandBuffer)
{
    ANGLE_TRY(submitCommandBuffer(commandBuffer));
    ANGLE_TRY(finish());

    return vk::NoError();
}
610
// Submits |commandBuffer| to the queue, waiting on |waitSemaphore| (at the
// bottom-of-pipe stage) and signaling |signalSemaphore| when execution completes.
// The submission is fenced and tracked via submitFrame().
vk::Error RendererVk::submitCommandsWithSync(vk::CommandBufferAndState *commandBuffer,
                                             const vk::Semaphore &waitSemaphore,
                                             const vk::Semaphore &signalSemaphore)
{
    // NOTE(review): this calls end() directly, whereas submitCommandBuffer() uses
    // ensureFinished() - confirm the buffer is always in the recording state here.
    ANGLE_TRY(commandBuffer->end());

    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext                = nullptr;
    submitInfo.waitSemaphoreCount   = 1;
    submitInfo.pWaitSemaphores      = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask    = &waitStageMask;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBuffer->ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = signalSemaphore.ptr();

    // TODO(jmadill): Investigate how to properly queue command buffer work.
    ANGLE_TRY(submitFrame(submitInfo));

    return vk::NoError();
}
635
// Blocks until the GPU queue is idle, then destroys every in-flight fence, command
// buffer, and queued garbage object.
vk::Error RendererVk::finish()
{
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    // NOTE(review): mLastCompletedQueueSerial is not advanced here even though all
    // submitted work has completed - confirm isSerialInUse() callers after finish().
    freeAllInFlightResources();
    return vk::NoError();
}
643
Jamie Madill0c0dc342017-03-24 14:18:51 -0400644void RendererVk::freeAllInFlightResources()
645{
646 for (auto &fence : mInFlightFences)
647 {
Jamie Madill7f738d42017-11-20 17:06:27 -0500648 fence.get().destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400649 }
650 mInFlightFences.clear();
651
652 for (auto &command : mInFlightCommands)
653 {
Jamie Madill7f738d42017-11-20 17:06:27 -0500654 command.get().destroy(mDevice, mCommandPool);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400655 }
656 mInFlightCommands.clear();
657
658 for (auto &garbage : mGarbage)
659 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400660 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400661 }
662 mGarbage.clear();
663}
664
Jamie Madill4c26fc22017-02-24 11:04:10 -0500665vk::Error RendererVk::checkInFlightCommands()
666{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400667 size_t finishedIndex = 0;
Jamie Madillf651c772017-02-21 15:03:51 -0500668
Jamie Madill4c26fc22017-02-24 11:04:10 -0500669 // Check if any in-flight command buffers are finished.
Jamie Madill0c0dc342017-03-24 14:18:51 -0400670 for (size_t index = 0; index < mInFlightFences.size(); index++)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500671 {
Jamie Madill0c0dc342017-03-24 14:18:51 -0400672 auto *inFlightFence = &mInFlightFences[index];
Jamie Madill4c26fc22017-02-24 11:04:10 -0500673
Jamie Madill0c0dc342017-03-24 14:18:51 -0400674 VkResult result = inFlightFence->get().getStatus(mDevice);
675 if (result == VK_NOT_READY)
676 break;
677 ANGLE_VK_TRY(result);
678 finishedIndex = index + 1;
679
680 // Release the fence handle.
681 // TODO(jmadill): Re-use fences.
Jamie Madill7f738d42017-11-20 17:06:27 -0500682 inFlightFence->get().destroy(mDevice);
Jamie Madill4c26fc22017-02-24 11:04:10 -0500683 }
684
Jamie Madill0c0dc342017-03-24 14:18:51 -0400685 if (finishedIndex == 0)
686 return vk::NoError();
Jamie Madillf651c772017-02-21 15:03:51 -0500687
Jamie Madill0c0dc342017-03-24 14:18:51 -0400688 Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
689 mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);
690
691 size_t completedCBIndex = 0;
692 for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
693 {
694 auto *inFlightCB = &mInFlightCommands[cbIndex];
695 if (inFlightCB->queueSerial() > finishedSerial)
696 break;
697
698 completedCBIndex = cbIndex + 1;
Jamie Madill7f738d42017-11-20 17:06:27 -0500699 inFlightCB->get().destroy(mDevice, mCommandPool);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400700 }
701
702 if (completedCBIndex == 0)
703 return vk::NoError();
704
705 mInFlightCommands.erase(mInFlightCommands.begin(),
706 mInFlightCommands.begin() + completedCBIndex);
707
708 size_t freeIndex = 0;
709 for (; freeIndex < mGarbage.size(); ++freeIndex)
710 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400711 if (!mGarbage[freeIndex].destroyIfComplete(mDevice, finishedSerial))
Jamie Madill0c0dc342017-03-24 14:18:51 -0400712 break;
713 }
714
715 // Remove the entries from the garbage list - they should be ready to go.
716 if (freeIndex > 0)
717 {
718 mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
Jamie Madillf651c772017-02-21 15:03:51 -0500719 }
720
Jamie Madill4c26fc22017-02-24 11:04:10 -0500721 return vk::NoError();
722}
723
// Submits |submitInfo| to the queue WITHOUT a fence, moves the current command buffer
// onto the in-flight list tagged with the current serial, and starts a new serial.
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    // No fence is passed, so completion of this submission is only observable indirectly
    // (via a later fenced submitFrame() or vkQueueWaitIdle).
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
740
// Submits |submitInfo| with a freshly created fence so completion can later be detected
// by checkInFlightCommands(). The fence and the current command buffer are queued on the
// in-flight lists, tagged with the current serial, before the serial is advanced.
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
{
    VkFenceCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;

    vk::Fence fence;
    ANGLE_TRY(fence.init(mDevice, createInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));

    // Store this command buffer in the in-flight list.
    mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    // Opportunistically reclaim anything the GPU has already finished with.
    ANGLE_TRY(checkInFlightCommands());

    return vk::NoError();
}
768
// Creates a staging image for texture upload/readback, delegating allocation to
// vk::StagingImage::init with this renderer's device, queue family, and memory
// properties. |imageOut| receives the initialized image.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.vkTextureFormat, extent, usage));
    return vk::NoError();
}
779
// Returns the shared GlslangWrapper reference acquired during initialize().
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}

// Returns the serial tagging work submitted from now until the next submission.
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
789
Jamie Madill1b038242017-11-01 15:14:36 -0400790gl::Error RendererVk::ensureInRenderPass(const gl::Context *context, FramebufferVk *framebufferVk)
791{
792 if (mCurrentRenderPassFramebuffer == framebufferVk)
793 {
794 return gl::NoError();
795 }
796
797 if (mCurrentRenderPassFramebuffer)
798 {
799 endRenderPass();
800 }
801 ANGLE_TRY(
802 framebufferVk->beginRenderPass(context, mDevice, &mCommandBuffer, mCurrentQueueSerial));
803 mCurrentRenderPassFramebuffer = framebufferVk;
804 return gl::NoError();
805}
806
807void RendererVk::endRenderPass()
808{
809 if (mCurrentRenderPassFramebuffer)
810 {
811 ASSERT(mCommandBuffer.started());
812 mCommandBuffer.endRenderPass();
813 mCurrentRenderPassFramebuffer = nullptr;
814 }
815}
816
// Called when |framebufferVk| is going away: ends the current render pass if it targets
// that framebuffer, so we never hold a pointer to a released framebuffer.
void RendererVk::onReleaseRenderPass(const FramebufferVk *framebufferVk)
{
    if (mCurrentRenderPassFramebuffer == framebufferVk)
    {
        endRenderPass();
    }
}
824
// Returns true if the GPU may still be executing work tagged with |resource|'s serial.
bool RendererVk::isResourceInUse(const ResourceVk &resource)
{
    return isSerialInUse(resource.getQueueSerial());
}

// A serial counts as in use until it is at or below the last known-completed serial.
// NOTE(review): in this file mLastCompletedQueueSerial is only assigned at construction;
// verify it is advanced when fences complete, otherwise this always returns true for
// submitted work.
bool RendererVk::isSerialInUse(Serial serial)
{
    return serial > mLastCompletedQueueSerial;
}
834
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400835} // namespace rx