blob: 1ec979b44c9d20f5b9ec413a293b65034c00dd83 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
44 for (const auto &extensionName : enabledExtensionNames)
45 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
// Callback registered with VK_EXT_debug_report when validation layers are enabled.
// Routes validation messages to ANGLE's logging. Returning VK_TRUE aborts the Vulkan
// call that triggered the report; VK_FALSE lets it continue.
VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                        VkDebugReportObjectTypeEXT objectType,
                                        uint64_t object,
                                        size_t location,
                                        int32_t messageCode,
                                        const char *layerPrefix,
                                        const char *message,
                                        void *userData)
{
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Info/perf/debug bits are suppressed by default.
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
// All Vulkan handles start null; both queue serials are seeded from mQueueSerialFactory so
// the "last completed" serial sorts strictly before the "current" serial.
// NOTE(review): this relies on mQueueSerialFactory being declared before the serial members
// in the header (members initialize in declaration order) - confirm in RendererVk.h.
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands(),
      mCurrentRenderPassFramebuffer(nullptr)
{
}
103
// Tears down in reverse order of creation: first drain the GPU if any work is still
// tracked, then release child objects (command buffer/pool), then the device, the debug
// callback, and finally the instance.
RendererVk::~RendererVk()
{
    // If any in-flight work or deferred garbage remains, wait for the queue to idle and
    // free it; destroying handles the GPU may still be using is invalid.
    if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
    {
        vk::Error error = finish();
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    // The command buffer must be freed before its pool is destroyed.
    if (mCommandBuffer.valid())
    {
        mCommandBuffer.destroy(mDevice);
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        // The destroy entry point is an extension function and must be fetched dynamically.
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device is owned by the instance; just drop the handle.
    mPhysicalDevice = VK_NULL_HANDLE;
}
154
Frank Henigman29f148b2016-11-23 21:05:36 -0500155vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500156{
Jamie Madill222c5172017-07-19 16:15:42 -0400157 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500158
159 // If we're loading the validation layers, we could be running from any random directory.
160 // Change to the executable directory so we can find the layers, then change back to the
161 // previous directory to be safe we don't disrupt the application.
162 std::string previousCWD;
163
164 if (mEnableValidationLayers)
165 {
166 const auto &cwd = angle::GetCWD();
167 if (!cwd.valid())
168 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500169 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500170 mEnableValidationLayers = false;
171 }
172 else
173 {
174 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400175 const char *exeDir = angle::GetExecutableDirectory();
176 if (!angle::SetCWD(exeDir))
177 {
178 ERR() << "Error setting CWD for Vulkan layers init.";
179 mEnableValidationLayers = false;
180 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500181 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400182 }
183
184 // Override environment variable to use the ANGLE layers.
185 if (mEnableValidationLayers)
186 {
187 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
188 {
189 ERR() << "Error setting environment for Vulkan layers init.";
190 mEnableValidationLayers = false;
191 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500192 }
193
Jamie Madill0448ec82016-12-23 13:41:47 -0500194 // Gather global layer properties.
195 uint32_t instanceLayerCount = 0;
196 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
197
198 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
199 if (instanceLayerCount > 0)
200 {
201 ANGLE_VK_TRY(
202 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
203 }
204
Jamie Madille09bd5d2016-11-29 16:20:35 -0500205 uint32_t instanceExtensionCount = 0;
206 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
207
208 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
209 if (instanceExtensionCount > 0)
210 {
211 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
212 instanceExtensionProps.data()));
213 }
214
Jamie Madill0448ec82016-12-23 13:41:47 -0500215 if (mEnableValidationLayers)
216 {
217 // Verify the standard validation layers are available.
218 if (!HasStandardValidationLayer(instanceLayerProps))
219 {
220 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400221 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
222 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500223 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500224 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500225 }
226 else
227 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500228 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500229 }
230 mEnableValidationLayers = false;
231 }
232 }
233
Jamie Madille09bd5d2016-11-29 16:20:35 -0500234 std::vector<const char *> enabledInstanceExtensions;
235 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500236 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500237
Jamie Madill0448ec82016-12-23 13:41:47 -0500238 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
239 if (mEnableValidationLayers)
240 {
241 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
242 }
243
Jamie Madille09bd5d2016-11-29 16:20:35 -0500244 // Verify the required extensions are in the extension names set. Fail if not.
245 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
246
Jamie Madill327ba852016-11-30 12:38:28 -0500247 VkApplicationInfo applicationInfo;
248 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
249 applicationInfo.pNext = nullptr;
250 applicationInfo.pApplicationName = "ANGLE";
251 applicationInfo.applicationVersion = 1;
252 applicationInfo.pEngineName = "ANGLE";
253 applicationInfo.engineVersion = 1;
254 applicationInfo.apiVersion = VK_API_VERSION_1_0;
255
256 VkInstanceCreateInfo instanceInfo;
257 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
258 instanceInfo.pNext = nullptr;
259 instanceInfo.flags = 0;
260 instanceInfo.pApplicationInfo = &applicationInfo;
261
Jamie Madille09bd5d2016-11-29 16:20:35 -0500262 // Enable requested layers and extensions.
263 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
264 instanceInfo.ppEnabledExtensionNames =
265 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500266 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
267 instanceInfo.ppEnabledLayerNames =
268 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500269
270 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
271
Jamie Madill0448ec82016-12-23 13:41:47 -0500272 if (mEnableValidationLayers)
273 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500274 // Change back to the previous working directory now that we've loaded the instance -
275 // the validation layers should be loaded at this point.
276 angle::SetCWD(previousCWD.c_str());
277
Jamie Madill0448ec82016-12-23 13:41:47 -0500278 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
279
280 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
281 debugReportInfo.pNext = nullptr;
282 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
283 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
284 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
285 debugReportInfo.pfnCallback = &DebugReportCallback;
286 debugReportInfo.pUserData = this;
287
288 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
289 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
290 ASSERT(createDebugReportCallback);
291 ANGLE_VK_TRY(
292 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
293 }
294
Jamie Madill4d0bf552016-12-28 15:45:24 -0500295 uint32_t physicalDeviceCount = 0;
296 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
297 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
298
299 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
300 physicalDeviceCount = 1;
301 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
302
303 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
304
305 // Ensure we can find a graphics queue family.
306 uint32_t queueCount = 0;
307 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
308
309 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
310
311 mQueueFamilyProperties.resize(queueCount);
312 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
313 mQueueFamilyProperties.data());
314
315 size_t graphicsQueueFamilyCount = false;
316 uint32_t firstGraphicsQueueFamily = 0;
317 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
318 {
319 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
320 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
321 {
322 ASSERT(queueInfo.queueCount > 0);
323 graphicsQueueFamilyCount++;
324 if (firstGraphicsQueueFamily == 0)
325 {
326 firstGraphicsQueueFamily = familyIndex;
327 }
328 break;
329 }
330 }
331
332 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
333
334 // If only one queue family, go ahead and initialize the device. If there is more than one
335 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
336 if (graphicsQueueFamilyCount == 1)
337 {
338 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
339 }
340
Jamie Madill035fd6b2017-10-03 15:43:22 -0400341 // Store the physical device memory properties so we can find the right memory pools.
342 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500343
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500344 mGlslangWrapper = GlslangWrapper::GetReference();
345
Jamie Madill6a89d222017-11-02 11:59:51 -0400346 // Initialize the format table.
347 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);
348
Jamie Madill327ba852016-11-30 12:38:28 -0500349 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400350}
351
// Creates the logical device on |queueFamilyIndex| (which must support graphics), fetches
// its queue, and creates the shared command pool / command buffer. Called either from
// initialize() or lazily from selectPresentQueueForSurface().
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Enumerate device layers/extensions so we can validate what we are about to enable.
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Validation may have been enabled at the instance level; disable it here if the device
    // does not also provide the standard validation layer.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    VkDeviceQueueCreateInfo queueCreateInfo;

    // A single queue at the lowest priority is sufficient for now.
    float zeroPriority = 0.0f;

    queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext = nullptr;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos = &queueCreateInfo;
    createInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    // TODO(jmadill): Investigate transient command buffers.
    commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    mCommandBuffer.setCommandPool(&mCommandPool);

    return vk::NoError();
}
436
// Picks a queue family that can both render and present to |surface|, creating the logical
// device on it if the device was not created during initialize().
vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        // NOTE(review): this returns the support bool (0 or 1) as the uint32_t "queue index"
        // result rather than mCurrentQueueFamilyIndex - looks like a latent bug; confirm
        // against the callers before relying on the returned value as an index.
        return (supportsPresent == VK_TRUE);
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
                                                              &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(newPresentQueue.value()));

    return newPresentQueue.value();
}
478
479std::string RendererVk::getVendorString() const
480{
481 switch (mPhysicalDeviceProperties.vendorID)
482 {
483 case VENDOR_ID_AMD:
484 return "Advanced Micro Devices";
485 case VENDOR_ID_NVIDIA:
486 return "NVIDIA";
487 case VENDOR_ID_INTEL:
488 return "Intel";
489 default:
490 {
491 // TODO(jmadill): More vendor IDs.
492 std::stringstream strstr;
493 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
494 return strstr.str();
495 }
496 }
497}
498
Jamie Madille09bd5d2016-11-29 16:20:35 -0500499std::string RendererVk::getRendererDescription() const
500{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500501 std::stringstream strstr;
502
503 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
504
505 strstr << "Vulkan ";
506 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
507 strstr << VK_VERSION_MINOR(apiVersion) << ".";
508 strstr << VK_VERSION_PATCH(apiVersion);
509
510 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
511
512 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500513}
514
Jamie Madillacccc6c2016-05-03 17:22:10 -0400515void RendererVk::ensureCapsInitialized() const
516{
517 if (!mCapsInitialized)
518 {
519 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
520 mCapsInitialized = true;
521 }
522}
523
// Fills in the native GL caps/extensions. Values are placeholder minimums for the
// in-progress Vulkan backend, not queried from the device yet (see TODO below).
void RendererVk::generateCaps(gl::Caps *outCaps,
                              gl::TextureCapsMap * /*outTextureCaps*/,
                              gl::Extensions *outExtensions,
                              gl::Limitations * /* outLimitations */) const
{
    // TODO(jmadill): Caps.
    outCaps->maxDrawBuffers = 1;
    outCaps->maxVertexAttributes = gl::MAX_VERTEX_ATTRIBS;
    outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
    outCaps->maxVaryingVectors = 16;
    outCaps->maxTextureImageUnits = 1;
    outCaps->maxCombinedTextureImageUnits = 1;
    outCaps->max2DTextureSize = 1024;
    outCaps->maxElementIndex = std::numeric_limits<GLuint>::max() - 1;
    outCaps->maxFragmentUniformVectors = 8;
    outCaps->maxVertexUniformVectors = 8;
    outCaps->maxColorAttachments = 1;

    // Enable this for simple buffer readback testing, but some functionality is missing.
    // TODO(jmadill): Support full mapBufferRange extension.
    outExtensions->mapBuffer = true;
    outExtensions->mapBufferRange = true;
}
547
// Returns the native GL caps, generating them lazily on first use.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
553
// Returns the native texture caps, generating them lazily on first use.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
559
// Returns the native GL extensions, generating them lazily on first use.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
565
// Returns the native GL limitations, generating them lazily on first use.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
571
// Begins recording on the renderer's shared command buffer and hands it out.
vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBuffer **commandBufferOut)
{
    ANGLE_TRY(mCommandBuffer.begin(mDevice));
    *commandBufferOut = &mCommandBuffer;
    return vk::NoError();
}
578
Jamie Madill0c0dc342017-03-24 14:18:51 -0400579vk::Error RendererVk::submitCommandBuffer(vk::CommandBuffer *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500580{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400581 ANGLE_TRY(commandBuffer->end());
582
Jamie Madill4d0bf552016-12-28 15:45:24 -0500583 VkFenceCreateInfo fenceInfo;
584 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
585 fenceInfo.pNext = nullptr;
586 fenceInfo.flags = 0;
587
Jamie Madill4d0bf552016-12-28 15:45:24 -0500588 VkSubmitInfo submitInfo;
589 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
590 submitInfo.pNext = nullptr;
591 submitInfo.waitSemaphoreCount = 0;
592 submitInfo.pWaitSemaphores = nullptr;
593 submitInfo.pWaitDstStageMask = nullptr;
594 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400595 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500596 submitInfo.signalSemaphoreCount = 0;
597 submitInfo.pSignalSemaphores = nullptr;
598
599 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500600 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500601
Jamie Madillf651c772017-02-21 15:03:51 -0500602 return vk::NoError();
603}
604
// Submits |commandBuffer| and then blocks until the queue is idle, freeing all
// in-flight resources (see finish()).
vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBuffer *commandBuffer)
{
    ANGLE_TRY(submitCommandBuffer(commandBuffer));
    ANGLE_TRY(finish());

    return vk::NoError();
}
612
// Ends |commandBuffer| and submits it through submitFrame(), waiting on |waitSemaphore| at
// the bottom-of-pipe stage and signaling |signalSemaphore| when execution completes.
vk::Error RendererVk::submitCommandsWithSync(vk::CommandBuffer *commandBuffer,
                                             const vk::Semaphore &waitSemaphore,
                                             const vk::Semaphore &signalSemaphore)
{
    ANGLE_TRY(commandBuffer->end());

    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = nullptr;
    submitInfo.waitSemaphoreCount = 1;
    submitInfo.pWaitSemaphores = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask = &waitStageMask;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = commandBuffer->ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores = signalSemaphore.ptr();

    // TODO(jmadill): Investigate how to properly queue command buffer work.
    ANGLE_TRY(submitFrame(submitInfo));

    return vk::NoError();
}
637
// Blocks until the graphics queue is idle, then frees every in-flight fence, command
// buffer and garbage object. Called from the destructor and after full-finish submits.
vk::Error RendererVk::finish()
{
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
645
// Destroys every tracked in-flight fence, in-flight command buffer, and deferred-garbage
// object. Only safe once the GPU has finished with them - callers wait for queue idle
// first (see finish()).
void RendererVk::freeAllInFlightResources()
{
    for (auto &fence : mInFlightFences)
    {
        fence.destroy(mDevice);
    }
    mInFlightFences.clear();

    for (auto &command : mInFlightCommands)
    {
        command.destroy(mDevice);
    }
    mInFlightCommands.clear();

    // Garbage is destroyed unconditionally here (vs. serial-checked in
    // checkInFlightCommands) because the queue is known idle.
    for (auto &garbage : mGarbage)
    {
        garbage.destroy(mDevice);
    }
    mGarbage.clear();
}
666
Jamie Madill4c26fc22017-02-24 11:04:10 -0500667vk::Error RendererVk::checkInFlightCommands()
668{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400669 size_t finishedIndex = 0;
Jamie Madillf651c772017-02-21 15:03:51 -0500670
Jamie Madill4c26fc22017-02-24 11:04:10 -0500671 // Check if any in-flight command buffers are finished.
Jamie Madill0c0dc342017-03-24 14:18:51 -0400672 for (size_t index = 0; index < mInFlightFences.size(); index++)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500673 {
Jamie Madill0c0dc342017-03-24 14:18:51 -0400674 auto *inFlightFence = &mInFlightFences[index];
Jamie Madill4c26fc22017-02-24 11:04:10 -0500675
Jamie Madill0c0dc342017-03-24 14:18:51 -0400676 VkResult result = inFlightFence->get().getStatus(mDevice);
677 if (result == VK_NOT_READY)
678 break;
679 ANGLE_VK_TRY(result);
680 finishedIndex = index + 1;
681
682 // Release the fence handle.
683 // TODO(jmadill): Re-use fences.
684 inFlightFence->destroy(mDevice);
Jamie Madill4c26fc22017-02-24 11:04:10 -0500685 }
686
Jamie Madill0c0dc342017-03-24 14:18:51 -0400687 if (finishedIndex == 0)
688 return vk::NoError();
Jamie Madillf651c772017-02-21 15:03:51 -0500689
Jamie Madill0c0dc342017-03-24 14:18:51 -0400690 Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
691 mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);
692
693 size_t completedCBIndex = 0;
694 for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
695 {
696 auto *inFlightCB = &mInFlightCommands[cbIndex];
697 if (inFlightCB->queueSerial() > finishedSerial)
698 break;
699
700 completedCBIndex = cbIndex + 1;
701 inFlightCB->destroy(mDevice);
702 }
703
704 if (completedCBIndex == 0)
705 return vk::NoError();
706
707 mInFlightCommands.erase(mInFlightCommands.begin(),
708 mInFlightCommands.begin() + completedCBIndex);
709
710 size_t freeIndex = 0;
711 for (; freeIndex < mGarbage.size(); ++freeIndex)
712 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400713 if (!mGarbage[freeIndex].destroyIfComplete(mDevice, finishedSerial))
Jamie Madill0c0dc342017-03-24 14:18:51 -0400714 break;
715 }
716
717 // Remove the entries from the garbage list - they should be ready to go.
718 if (freeIndex > 0)
719 {
720 mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
Jamie Madillf651c772017-02-21 15:03:51 -0500721 }
722
Jamie Madill4c26fc22017-02-24 11:04:10 -0500723 return vk::NoError();
724}
725
// Submits |submitInfo| to the queue without a fence, moves the shared command buffer into
// the in-flight list tagged with the current serial, and advances the serial.
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    // NOTE(review): this moves from mCommandBuffer; it appears to be restarted via
    // getStartedCommandBuffer() before next use - confirm all call sites do so.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
742
// Submits |submitInfo| with a freshly created fence so completion can be polled later,
// records the fence and the shared command buffer as in-flight under the current serial,
// advances the serial, and opportunistically retires any already-finished work.
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
{
    VkFenceCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;

    vk::Fence fence;
    ANGLE_TRY(fence.init(mDevice, createInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));

    // Store this command buffer in the in-flight list.
    mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    // Retire whatever prior submissions have already completed.
    ANGLE_TRY(checkInFlightCommands());

    return vk::NoError();
}
770
// Initializes |imageOut| as a staging image using the renderer's device, queue family and
// memory properties; |usage| selects the staging access pattern.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.vkTextureFormat, extent, usage));
    return vk::NoError();
}
781
// Returns the shared GlslangWrapper acquired during initialize() (may be null before then).
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
786
// Returns the serial that tags work submitted next (advanced on each submit).
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
791
// Ensures the shared command buffer is recording inside a render pass that targets
// |framebufferVk|, ending any render pass that targets a different framebuffer first.
// No-op if the requested framebuffer's render pass is already active.
gl::Error RendererVk::ensureInRenderPass(const gl::Context *context, FramebufferVk *framebufferVk)
{
    if (mCurrentRenderPassFramebuffer == framebufferVk)
    {
        return gl::NoError();
    }

    if (mCurrentRenderPassFramebuffer)
    {
        endRenderPass();
    }
    ANGLE_TRY(
        framebufferVk->beginRenderPass(context, mDevice, &mCommandBuffer, mCurrentQueueSerial));
    mCurrentRenderPassFramebuffer = framebufferVk;
    return gl::NoError();
}
808
809void RendererVk::endRenderPass()
810{
811 if (mCurrentRenderPassFramebuffer)
812 {
813 ASSERT(mCommandBuffer.started());
814 mCommandBuffer.endRenderPass();
815 mCurrentRenderPassFramebuffer = nullptr;
816 }
817}
818
// Called when |framebufferVk| is going away; ends its render pass if it is the one
// currently active so we don't keep a dangling framebuffer pointer.
void RendererVk::onReleaseRenderPass(const FramebufferVk *framebufferVk)
{
    if (mCurrentRenderPassFramebuffer == framebufferVk)
    {
        endRenderPass();
    }
}
826
// True when the GPU may still be using |resource| (its serial has not been observed
// complete).
bool RendererVk::isResourceInUse(const ResourceVk &resource)
{
    return isSerialInUse(resource.getQueueSerial());
}
831
// A serial is "in use" until it is at or below the last serial recorded as completed.
bool RendererVk::isSerialInUse(Serial serial)
{
    return serial > mLastCompletedQueueSerial;
}
836
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400837} // namespace rx