blob: dc620dcd1897053a62d475ea2b850b01cd7838ce [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madill49ac74b2017-12-21 14:42:33 -050020#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050021#include "libANGLE/renderer/vulkan/CompilerVk.h"
22#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050023#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050024#include "libANGLE/renderer/vulkan/TextureVk.h"
25#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050026#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050027#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040028
29namespace rx
30{
31
Jamie Madille09bd5d2016-11-29 16:20:35 -050032namespace
33{
34
35VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
36 const std::vector<const char *> &enabledExtensionNames)
37{
38 // Compile the extensions names into a set.
39 std::set<std::string> extensionNames;
40 for (const auto &extensionProp : extensionProps)
41 {
42 extensionNames.insert(extensionProp.extensionName);
43 }
44
Jamie Madillacf2f3a2017-11-21 19:22:44 -050045 for (const char *extensionName : enabledExtensionNames)
Jamie Madille09bd5d2016-11-29 16:20:35 -050046 {
47 if (extensionNames.count(extensionName) == 0)
48 {
49 return VK_ERROR_EXTENSION_NOT_PRESENT;
50 }
51 }
52
53 return VK_SUCCESS;
54}
55
// Callback registered with the VK_EXT_debug_report extension. Routes validation
// layer messages into ANGLE's logging macros based on severity. Returning
// VK_TRUE aborts the Vulkan call that triggered the report.
VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                        VkDebugReportObjectTypeEXT objectType,
                                        uint64_t object,
                                        size_t location,
                                        int32_t messageCode,
                                        const char *layerPrefix,
                                        const char *message,
                                        void *userData)
{
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Informational/debug-level reports are dropped by default.
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}
85
Jamie Madille09bd5d2016-11-29 16:20:35 -050086} // anonymous namespace
87
Jamie Madill9f2a8612017-11-30 12:43:09 -050088// RenderPassCache implementation.
89RenderPassCache::RenderPassCache()
90{
91}
92
RenderPassCache::~RenderPassCache()
{
    // destroy() must have been called (with the VkDevice) before destruction;
    // otherwise cached VkRenderPass objects would leak.
    ASSERT(mPayload.empty());
}
97
98void RenderPassCache::destroy(VkDevice device)
99{
Jamie Madillbef918c2017-12-13 13:11:30 -0500100 for (auto &outerIt : mPayload)
Jamie Madill9f2a8612017-11-30 12:43:09 -0500101 {
Jamie Madillbef918c2017-12-13 13:11:30 -0500102 for (auto &innerIt : outerIt.second)
103 {
104 innerIt.second.get().destroy(device);
105 }
Jamie Madill9f2a8612017-11-30 12:43:09 -0500106 }
107 mPayload.clear();
108}
109
// Returns a render pass matching 'desc', ignoring attachment load/store ops:
// any cached entry under the same desc is returned. On a cache miss, a render
// pass is created with placeholder ops via getRenderPassWithOps.
vk::Error RenderPassCache::getCompatibleRenderPass(VkDevice device,
                                                   Serial serial,
                                                   const vk::RenderPassDesc &desc,
                                                   vk::RenderPass **renderPassOut)
{
    auto outerIt = mPayload.find(desc);
    if (outerIt != mPayload.end())
    {
        InnerCache &innerCache = outerIt->second;
        ASSERT(!innerCache.empty());

        // Find the first element and return it.
        *renderPassOut = &innerCache.begin()->second.get();
        return vk::NoError();
    }

    // Insert some dummy attachment ops.
    // TODO(jmadill): Pre-populate the cache in the Renderer so we rarely miss here.
    vk::AttachmentOpsArray ops;
    for (uint32_t colorIndex = 0; colorIndex < desc.colorAttachmentCount(); ++colorIndex)
    {
        ops.initDummyOp(colorIndex, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
    }

    if (desc.depthStencilAttachmentCount() > 0)
    {
        // The depth/stencil attachment slot follows the color attachments.
        ops.initDummyOp(desc.colorAttachmentCount(),
                        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
    }

    return getRenderPassWithOps(device, serial, desc, ops, renderPassOut);
}
142
// Returns a render pass matching both 'desc' and the exact attachment ops,
// creating and caching one on a miss. The cached entry's serial is refreshed
// on every hit so in-use passes are not reclaimed.
vk::Error RenderPassCache::getRenderPassWithOps(VkDevice device,
                                                Serial serial,
                                                const vk::RenderPassDesc &desc,
                                                const vk::AttachmentOpsArray &attachmentOps,
                                                vk::RenderPass **renderPassOut)
{
    auto outerIt = mPayload.find(desc);
    if (outerIt != mPayload.end())
    {
        InnerCache &innerCache = outerIt->second;

        auto innerIt = innerCache.find(attachmentOps);
        if (innerIt != innerCache.end())
        {
            // Update the serial before we return.
            // TODO(jmadill): Could possibly use an MRU cache here.
            innerIt->second.updateSerial(serial);
            *renderPassOut = &innerIt->second.get();
            return vk::NoError();
        }
    }
    else
    {
        // First render pass for this desc: create the inner cache level.
        auto emplaceResult = mPayload.emplace(desc, InnerCache());
        outerIt            = emplaceResult.first;
    }

    // Cache miss: build a new VkRenderPass from the description and ops.
    vk::RenderPass newRenderPass;
    ANGLE_TRY(vk::InitializeRenderPassFromDesc(device, desc, attachmentOps, &newRenderPass));

    vk::RenderPassAndSerial withSerial(std::move(newRenderPass), serial);

    InnerCache &innerCache = outerIt->second;
    auto insertPos         = innerCache.emplace(attachmentOps, std::move(withSerial));
    *renderPassOut         = &insertPos.first->second.get();

    // TODO(jmadill): Trim cache, and pre-populate with the most common RPs on startup.
    return vk::NoError();
}
182
Jamie Madill49ac74b2017-12-21 14:42:33 -0500183// CommandBatch implementation.
184RendererVk::CommandBatch::CommandBatch()
185{
186}
187
188RendererVk::CommandBatch::~CommandBatch()
189{
190}
191
// Move constructor: steals the command pool and fence; the serial value is
// trivially copied.
RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{
}
196
// Move assignment implemented as a member-wise swap: our previous resources
// end up in 'other' and are released when it is destroyed.
RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}
204
Jamie Madill9f2a8612017-11-30 12:43:09 -0500205// RendererVk implementation.
// All Vulkan handles start null; real initialization happens in initialize().
// Two serials are generated up front so that mCurrentQueueSerial is strictly
// greater than mLastCompletedQueueSerial from the start.
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
221
// Tears down in reverse dependency order: drain pending GPU work, destroy
// device-owned objects, then the device, the debug callback, and finally the
// instance.
RendererVk::~RendererVk()
{
    // If work is still pending, wait for the GPU to finish so handles can be
    // destroyed safely.
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        vk::Error error = finish(nullptr);
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    // Caches holding VkRenderPass objects need the device to still be alive.
    mRenderPassCache.destroy(mDevice);

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        // The destroy entry point belongs to VK_EXT_debug_report and must be
        // fetched from the instance.
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mPhysicalDevice = VK_NULL_HANDLE;
}
270
Frank Henigman29f148b2016-11-23 21:05:36 -0500271vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500272{
Jamie Madill222c5172017-07-19 16:15:42 -0400273 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500274
275 // If we're loading the validation layers, we could be running from any random directory.
276 // Change to the executable directory so we can find the layers, then change back to the
277 // previous directory to be safe we don't disrupt the application.
278 std::string previousCWD;
279
280 if (mEnableValidationLayers)
281 {
282 const auto &cwd = angle::GetCWD();
283 if (!cwd.valid())
284 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500285 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500286 mEnableValidationLayers = false;
287 }
288 else
289 {
290 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400291 const char *exeDir = angle::GetExecutableDirectory();
292 if (!angle::SetCWD(exeDir))
293 {
294 ERR() << "Error setting CWD for Vulkan layers init.";
295 mEnableValidationLayers = false;
296 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500297 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400298 }
299
300 // Override environment variable to use the ANGLE layers.
301 if (mEnableValidationLayers)
302 {
303 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
304 {
305 ERR() << "Error setting environment for Vulkan layers init.";
306 mEnableValidationLayers = false;
307 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500308 }
309
Jamie Madill0448ec82016-12-23 13:41:47 -0500310 // Gather global layer properties.
311 uint32_t instanceLayerCount = 0;
312 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
313
314 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
315 if (instanceLayerCount > 0)
316 {
317 ANGLE_VK_TRY(
318 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
319 }
320
Jamie Madille09bd5d2016-11-29 16:20:35 -0500321 uint32_t instanceExtensionCount = 0;
322 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
323
324 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
325 if (instanceExtensionCount > 0)
326 {
327 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
328 instanceExtensionProps.data()));
329 }
330
Jamie Madill0448ec82016-12-23 13:41:47 -0500331 if (mEnableValidationLayers)
332 {
333 // Verify the standard validation layers are available.
334 if (!HasStandardValidationLayer(instanceLayerProps))
335 {
336 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400337 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
338 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500339 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500340 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500341 }
342 else
343 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500344 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500345 }
346 mEnableValidationLayers = false;
347 }
348 }
349
Jamie Madille09bd5d2016-11-29 16:20:35 -0500350 std::vector<const char *> enabledInstanceExtensions;
351 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500352 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500353
Jamie Madill0448ec82016-12-23 13:41:47 -0500354 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
355 if (mEnableValidationLayers)
356 {
357 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
358 }
359
Jamie Madille09bd5d2016-11-29 16:20:35 -0500360 // Verify the required extensions are in the extension names set. Fail if not.
361 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
362
Jamie Madill327ba852016-11-30 12:38:28 -0500363 VkApplicationInfo applicationInfo;
364 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
365 applicationInfo.pNext = nullptr;
366 applicationInfo.pApplicationName = "ANGLE";
367 applicationInfo.applicationVersion = 1;
368 applicationInfo.pEngineName = "ANGLE";
369 applicationInfo.engineVersion = 1;
370 applicationInfo.apiVersion = VK_API_VERSION_1_0;
371
372 VkInstanceCreateInfo instanceInfo;
373 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
374 instanceInfo.pNext = nullptr;
375 instanceInfo.flags = 0;
376 instanceInfo.pApplicationInfo = &applicationInfo;
377
Jamie Madille09bd5d2016-11-29 16:20:35 -0500378 // Enable requested layers and extensions.
379 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
380 instanceInfo.ppEnabledExtensionNames =
381 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500382 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
383 instanceInfo.ppEnabledLayerNames =
384 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500385
386 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
387
Jamie Madill0448ec82016-12-23 13:41:47 -0500388 if (mEnableValidationLayers)
389 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500390 // Change back to the previous working directory now that we've loaded the instance -
391 // the validation layers should be loaded at this point.
392 angle::SetCWD(previousCWD.c_str());
393
Jamie Madill0448ec82016-12-23 13:41:47 -0500394 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
395
396 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
397 debugReportInfo.pNext = nullptr;
398 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
399 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
400 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
401 debugReportInfo.pfnCallback = &DebugReportCallback;
402 debugReportInfo.pUserData = this;
403
404 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
405 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
406 ASSERT(createDebugReportCallback);
407 ANGLE_VK_TRY(
408 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
409 }
410
Jamie Madill4d0bf552016-12-28 15:45:24 -0500411 uint32_t physicalDeviceCount = 0;
412 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
413 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
414
415 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
416 physicalDeviceCount = 1;
417 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
418
419 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
420
421 // Ensure we can find a graphics queue family.
422 uint32_t queueCount = 0;
423 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
424
425 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
426
427 mQueueFamilyProperties.resize(queueCount);
428 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
429 mQueueFamilyProperties.data());
430
431 size_t graphicsQueueFamilyCount = false;
432 uint32_t firstGraphicsQueueFamily = 0;
433 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
434 {
435 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
436 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
437 {
438 ASSERT(queueInfo.queueCount > 0);
439 graphicsQueueFamilyCount++;
440 if (firstGraphicsQueueFamily == 0)
441 {
442 firstGraphicsQueueFamily = familyIndex;
443 }
444 break;
445 }
446 }
447
448 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
449
450 // If only one queue family, go ahead and initialize the device. If there is more than one
451 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
452 if (graphicsQueueFamilyCount == 1)
453 {
454 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
455 }
456
Jamie Madill035fd6b2017-10-03 15:43:22 -0400457 // Store the physical device memory properties so we can find the right memory pools.
458 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500459
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500460 mGlslangWrapper = GlslangWrapper::GetReference();
461
Jamie Madill6a89d222017-11-02 11:59:51 -0400462 // Initialize the format table.
463 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);
464
Jamie Madill327ba852016-11-30 12:38:28 -0500465 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400466}
467
// Creates the logical VkDevice on the given queue family, retrieves a single
// queue from it, and allocates the transient command pool used for per-frame
// command buffers.
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Enumerate device layers (used only to verify validation layer presence).
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    if (mEnableValidationLayers)
    {
        // Silently drop validation if the device-level layer is unavailable.
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    // Swapchain support is mandatory for presenting.
    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Request exactly one queue from the chosen family.
    VkDeviceQueueCreateInfo queueCreateInfo;

    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                = nullptr;
    createInfo.flags                = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueCreateInfo;
    createInfo.enabledLayerCount    = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext            = nullptr;
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    return vk::NoError();
}
549
// Picks a queue family that supports both graphics and presenting to
// 'surface'. If the device already exists, only checks that the current family
// can present; otherwise searches all families and initializes the device on
// the first match.
vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        // NOTE(review): this returns the boolean (0 or 1) through
        // ErrorOrResult<uint32_t>, not mCurrentQueueFamilyIndex — looks like the
        // intent was to return the family index; verify against the caller.
        return (supportsPresent == VK_TRUE);
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
                                                              &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(newPresentQueue.value()));

    return newPresentQueue.value();
}
591
592std::string RendererVk::getVendorString() const
593{
594 switch (mPhysicalDeviceProperties.vendorID)
595 {
596 case VENDOR_ID_AMD:
597 return "Advanced Micro Devices";
598 case VENDOR_ID_NVIDIA:
599 return "NVIDIA";
600 case VENDOR_ID_INTEL:
601 return "Intel";
602 default:
603 {
604 // TODO(jmadill): More vendor IDs.
605 std::stringstream strstr;
606 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
607 return strstr.str();
608 }
609 }
610}
611
Jamie Madille09bd5d2016-11-29 16:20:35 -0500612std::string RendererVk::getRendererDescription() const
613{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500614 std::stringstream strstr;
615
616 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
617
618 strstr << "Vulkan ";
619 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
620 strstr << VK_VERSION_MINOR(apiVersion) << ".";
621 strstr << VK_VERSION_PATCH(apiVersion);
622
623 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
624
625 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500626}
627
Jamie Madillacccc6c2016-05-03 17:22:10 -0400628void RendererVk::ensureCapsInitialized() const
629{
630 if (!mCapsInitialized)
631 {
632 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
633 mCapsInitialized = true;
634 }
635}
636
// Fills in placeholder GL capabilities for the Vulkan backend. Values are
// currently hard-coded minimums rather than queried from the device.
void RendererVk::generateCaps(gl::Caps *outCaps,
                              gl::TextureCapsMap * /*outTextureCaps*/,
                              gl::Extensions *outExtensions,
                              gl::Limitations * /* outLimitations */) const
{
    // TODO(jmadill): Caps.
    outCaps->maxDrawBuffers               = 1;
    outCaps->maxVertexAttributes          = gl::MAX_VERTEX_ATTRIBS;
    outCaps->maxVertexAttribBindings      = gl::MAX_VERTEX_ATTRIB_BINDINGS;
    outCaps->maxVaryingVectors            = 16;
    outCaps->maxTextureImageUnits         = 1;
    outCaps->maxCombinedTextureImageUnits = 1;
    outCaps->max2DTextureSize             = 1024;
    outCaps->maxElementIndex              = std::numeric_limits<GLuint>::max() - 1;
    outCaps->maxFragmentUniformVectors    = 8;
    outCaps->maxVertexUniformVectors      = 8;
    outCaps->maxColorAttachments          = 1;

    // Enable this for simple buffer readback testing, but some functionality is missing.
    // TODO(jmadill): Support full mapBufferRange extension.
    outExtensions->mapBuffer      = true;
    outExtensions->mapBufferRange = true;
}
660
// Accessors for the lazily-initialized native capability tables. Each one
// triggers cap generation on first use.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
684
// Returns the command pool that per-frame command buffers are allocated from.
const vk::CommandPool &RendererVk::getCommandPool() const
{
    return mCommandPool;
}
689
// Flushes any open command graph work, submits it, then blocks until the queue
// is idle and frees every in-flight batch and garbage object.
vk::Error RendererVk::finish(const gl::Context *context)
{
    if (!mOpenCommandGraph.empty())
    {
        vk::CommandBuffer commandBatch;
        ANGLE_TRY(flushCommandGraph(context, &commandBatch));

        // Plain submit: no semaphores to wait on or signal.
        VkSubmitInfo submitInfo;
        submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext                = nullptr;
        submitInfo.waitSemaphoreCount   = 0;
        submitInfo.pWaitSemaphores      = nullptr;
        submitInfo.pWaitDstStageMask    = nullptr;
        submitInfo.commandBufferCount   = 1;
        submitInfo.pCommandBuffers      = commandBatch.ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores    = nullptr;

        ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
    }

    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    // The queue is idle, so all in-flight resources are safe to free.
    freeAllInFlightResources();
    return vk::NoError();
}
716
Jamie Madill0c0dc342017-03-24 14:18:51 -0400717void RendererVk::freeAllInFlightResources()
718{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500719 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill0c0dc342017-03-24 14:18:51 -0400720 {
Jamie Madill49ac74b2017-12-21 14:42:33 -0500721 batch.fence.destroy(mDevice);
722 batch.commandPool.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400723 }
724 mInFlightCommands.clear();
725
726 for (auto &garbage : mGarbage)
727 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400728 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400729 }
730 mGarbage.clear();
731}
732
// Polls the fences of in-flight command batches, retiring the completed prefix
// and advancing mLastCompletedQueueSerial, then frees any garbage whose serial
// has been reached.
vk::Error RendererVk::checkInFlightCommands()
{
    int finishedCount = 0;

    // Batches were submitted in order; stop at the first fence not yet signaled.
    for (CommandBatch &batch : mInFlightCommands)
    {
        VkResult result = batch.fence.getStatus(mDevice);
        if (result == VK_NOT_READY)
            break;

        ANGLE_VK_TRY(result);
        // Serials increase monotonically with submission order.
        ASSERT(batch.serial > mLastCompletedQueueSerial);
        mLastCompletedQueueSerial = batch.serial;

        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
        ++finishedCount;
    }

    mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);

    // Garbage is also queued in serial order; free the completed prefix.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
769
Jamie Madill49ac74b2017-12-21 14:42:33 -0500770vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo, vk::CommandBuffer &&commandBuffer)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500771{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500772 VkFenceCreateInfo fenceInfo;
773 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
774 fenceInfo.pNext = nullptr;
775 fenceInfo.flags = 0;
776
777 CommandBatch batch;
778 ANGLE_TRY(batch.fence.init(mDevice, fenceInfo));
779
780 ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));
Jamie Madill4c26fc22017-02-24 11:04:10 -0500781
782 // Store this command buffer in the in-flight list.
Jamie Madill49ac74b2017-12-21 14:42:33 -0500783 batch.commandPool = std::move(mCommandPool);
784 batch.serial = mCurrentQueueSerial;
Jamie Madill4c26fc22017-02-24 11:04:10 -0500785
Jamie Madill49ac74b2017-12-21 14:42:33 -0500786 mInFlightCommands.emplace_back(std::move(batch));
Jamie Madill0c0dc342017-03-24 14:18:51 -0400787
788 // Sanity check.
789 ASSERT(mInFlightCommands.size() < 1000u);
790
791 // Increment the queue serial. If this fails, we should restart ANGLE.
Jamie Madillfb05bcb2017-06-07 15:43:18 -0400792 // TODO(jmadill): Overflow check.
793 mCurrentQueueSerial = mQueueSerialFactory.generate();
Jamie Madill0c0dc342017-03-24 14:18:51 -0400794
795 ANGLE_TRY(checkInFlightCommands());
796
Jamie Madill49ac74b2017-12-21 14:42:33 -0500797 // Simply null out the command buffer here - it was allocated using the command pool.
798 commandBuffer.releaseHandle();
799
800 // Reallocate the command pool for next frame.
801 // TODO(jmadill): Consider reusing command pools.
802 VkCommandPoolCreateInfo poolInfo;
803 poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
804 poolInfo.pNext = nullptr;
805 poolInfo.flags = 0;
806 poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
807
808 mCommandPool.init(mDevice, poolInfo);
809
Jamie Madill4c26fc22017-02-24 11:04:10 -0500810 return vk::NoError();
811}
812
// Creates and initializes a staging image through |imageOut|. |dimension|,
// |format|, |extent| and |usage| are forwarded unchanged to
// vk::StagingImage::init, along with this renderer's device, current queue
// family index and cached physical-device memory properties.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    // On init failure ANGLE_TRY propagates the error without touching state.
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.vkTextureFormat, extent, usage));
    return vk::NoError();
}
823
// Accessor for the renderer's GlslangWrapper instance (non-owning pointer;
// the renderer retains ownership).
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
828
// Returns the serial that will be attached to the next submitted command
// batch (see submitFrame, which stamps the batch with this value before
// generating a new serial).
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
833
Jamie Madill97760352017-11-09 13:08:29 -0500834bool RendererVk::isResourceInUse(const ResourceVk &resource)
835{
836 return isSerialInUse(resource.getQueueSerial());
837}
838
839bool RendererVk::isSerialInUse(Serial serial)
840{
841 return serial > mLastCompletedQueueSerial;
842}
843
// Looks up a render pass compatible with |desc| in the render pass cache,
// returning it through |renderPassOut|. The current queue serial is passed
// along — presumably so the cache can track last use; confirm in
// RenderPassCache.
vk::Error RendererVk::getCompatibleRenderPass(const vk::RenderPassDesc &desc,
                                              vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getCompatibleRenderPass(mDevice, mCurrentQueueSerial, desc,
                                                    renderPassOut);
}
850
// Looks up a render pass matching |desc| with the specific per-attachment
// load/store operations in |ops|, returning it through |renderPassOut|.
// Differs from getCompatibleRenderPass, which ignores attachment ops.
vk::Error RendererVk::getRenderPassWithOps(const vk::RenderPassDesc &desc,
                                           const vk::AttachmentOpsArray &ops,
                                           vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getRenderPassWithOps(mDevice, mCurrentQueueSerial, desc, ops,
                                                 renderPassOut);
}
858
Jamie Madill49ac74b2017-12-21 14:42:33 -0500859vk::CommandBufferNode *RendererVk::allocateCommandNode()
860{
861 // TODO(jmadill): Use a pool allocator for the CPU node allocations.
862 vk::CommandBufferNode *newCommands = new vk::CommandBufferNode();
863 mOpenCommandGraph.emplace_back(newCommands);
864 return newCommands;
865}
866
867vk::Error RendererVk::flushCommandGraph(const gl::Context *context, vk::CommandBuffer *commandBatch)
868{
869 VkCommandBufferAllocateInfo primaryInfo;
870 primaryInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
871 primaryInfo.pNext = nullptr;
872 primaryInfo.commandPool = mCommandPool.getHandle();
873 primaryInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
874 primaryInfo.commandBufferCount = 1;
875
876 ANGLE_TRY(commandBatch->init(mDevice, primaryInfo));
877
878 if (mOpenCommandGraph.empty())
879 {
880 return vk::NoError();
881 }
882
883 std::vector<vk::CommandBufferNode *> nodeStack;
884
885 VkCommandBufferBeginInfo beginInfo;
886 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
887 beginInfo.pNext = nullptr;
888 beginInfo.flags = 0;
889 beginInfo.pInheritanceInfo = nullptr;
890
891 ANGLE_TRY(commandBatch->begin(beginInfo));
892
893 for (vk::CommandBufferNode *topLevelNode : mOpenCommandGraph)
894 {
895 // Only process commands that don't have child commands. The others will be pulled in
896 // automatically. Also skip commands that have already been visited.
897 if (topLevelNode->isDependency() ||
898 topLevelNode->visitedState() != vk::VisitedState::Unvisited)
899 continue;
900
901 nodeStack.push_back(topLevelNode);
902
903 while (!nodeStack.empty())
904 {
905 vk::CommandBufferNode *node = nodeStack.back();
906
907 switch (node->visitedState())
908 {
909 case vk::VisitedState::Unvisited:
910 node->visitDependencies(&nodeStack);
911 break;
912 case vk::VisitedState::Ready:
913 ANGLE_TRY(node->visitAndExecute(this, commandBatch));
914 nodeStack.pop_back();
915 break;
916 case vk::VisitedState::Visited:
917 nodeStack.pop_back();
918 break;
919 default:
920 UNREACHABLE();
921 break;
922 }
923 }
924 }
925
926 ANGLE_TRY(commandBatch->end());
927 return vk::NoError();
928}
929
930void RendererVk::resetCommandGraph()
931{
932 // TODO(jmadill): Use pool allocation so we don't need to deallocate command graph.
933 for (vk::CommandBufferNode *node : mOpenCommandGraph)
934 {
935 delete node;
936 }
937 mOpenCommandGraph.clear();
938}
939
940vk::Error RendererVk::flush(const gl::Context *context,
941 const vk::Semaphore &waitSemaphore,
942 const vk::Semaphore &signalSemaphore)
943{
944 vk::CommandBuffer commandBatch;
945 ANGLE_TRY(flushCommandGraph(context, &commandBatch));
946
947 VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
948
949 VkSubmitInfo submitInfo;
950 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
951 submitInfo.pNext = nullptr;
952 submitInfo.waitSemaphoreCount = 1;
953 submitInfo.pWaitSemaphores = waitSemaphore.ptr();
954 submitInfo.pWaitDstStageMask = &waitStageMask;
955 submitInfo.commandBufferCount = 1;
956 submitInfo.pCommandBuffers = commandBatch.ptr();
957 submitInfo.signalSemaphoreCount = 1;
958 submitInfo.pSignalSemaphores = signalSemaphore.ptr();
959
960 ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
961 return vk::NoError();
962}
963
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400964} // namespace rx