blob: 3ac7573094cb5dff0571bf4595b72a3fc24c9606 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madill49ac74b2017-12-21 14:42:33 -050020#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050021#include "libANGLE/renderer/vulkan/CompilerVk.h"
22#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050023#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050024#include "libANGLE/renderer/vulkan/TextureVk.h"
25#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050026#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050027#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040028
29namespace rx
30{
31
Jamie Madille09bd5d2016-11-29 16:20:35 -050032namespace
33{
34
35VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
36 const std::vector<const char *> &enabledExtensionNames)
37{
38 // Compile the extensions names into a set.
39 std::set<std::string> extensionNames;
40 for (const auto &extensionProp : extensionProps)
41 {
42 extensionNames.insert(extensionProp.extensionName);
43 }
44
Jamie Madillacf2f3a2017-11-21 19:22:44 -050045 for (const char *extensionName : enabledExtensionNames)
Jamie Madille09bd5d2016-11-29 16:20:35 -050046 {
47 if (extensionNames.count(extensionName) == 0)
48 {
49 return VK_ERROR_EXTENSION_NOT_PRESENT;
50 }
51 }
52
53 return VK_SUCCESS;
54}
55
Jamie Madill0448ec82016-12-23 13:41:47 -050056VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
57 VkDebugReportObjectTypeEXT objectType,
58 uint64_t object,
59 size_t location,
60 int32_t messageCode,
61 const char *layerPrefix,
62 const char *message,
63 void *userData)
64{
65 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
66 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050067 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050068#if !defined(NDEBUG)
69 // Abort the call in Debug builds.
70 return VK_TRUE;
71#endif
72 }
73 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
74 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050075 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050076 }
77 else
78 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050079 // Uncomment this if you want Vulkan spam.
80 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050081 }
82
83 return VK_FALSE;
84}
85
Jamie Madille09bd5d2016-11-29 16:20:35 -050086} // anonymous namespace
87
Jamie Madill9f2a8612017-11-30 12:43:09 -050088// RenderPassCache implementation.
89RenderPassCache::RenderPassCache()
90{
91}
92
RenderPassCache::~RenderPassCache()
{
    // destroy() must have been called (with the VkDevice) before destruction;
    // the render passes cannot be freed here because no device handle is available.
    ASSERT(mPayload.empty());
}
97
98void RenderPassCache::destroy(VkDevice device)
99{
Jamie Madillbef918c2017-12-13 13:11:30 -0500100 for (auto &outerIt : mPayload)
Jamie Madill9f2a8612017-11-30 12:43:09 -0500101 {
Jamie Madillbef918c2017-12-13 13:11:30 -0500102 for (auto &innerIt : outerIt.second)
103 {
104 innerIt.second.get().destroy(device);
105 }
Jamie Madill9f2a8612017-11-30 12:43:09 -0500106 }
107 mPayload.clear();
108}
109
110vk::Error RenderPassCache::getCompatibleRenderPass(VkDevice device,
111 Serial serial,
112 const vk::RenderPassDesc &desc,
113 vk::RenderPass **renderPassOut)
114{
Jamie Madillbef918c2017-12-13 13:11:30 -0500115 auto outerIt = mPayload.find(desc);
116 if (outerIt != mPayload.end())
Jamie Madill9f2a8612017-11-30 12:43:09 -0500117 {
Jamie Madillbef918c2017-12-13 13:11:30 -0500118 InnerCache &innerCache = outerIt->second;
119 ASSERT(!innerCache.empty());
Jamie Madill9f2a8612017-11-30 12:43:09 -0500120
Jamie Madillbef918c2017-12-13 13:11:30 -0500121 // Find the first element and return it.
122 *renderPassOut = &innerCache.begin()->second.get();
Jamie Madill9f2a8612017-11-30 12:43:09 -0500123 return vk::NoError();
124 }
125
Jamie Madillbef918c2017-12-13 13:11:30 -0500126 // Insert some dummy attachment ops.
127 // TODO(jmadill): Pre-populate the cache in the Renderer so we rarely miss here.
128 vk::AttachmentOpsArray ops;
129 for (uint32_t colorIndex = 0; colorIndex < desc.colorAttachmentCount(); ++colorIndex)
130 {
131 ops.initDummyOp(colorIndex, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
132 }
133
134 if (desc.depthStencilAttachmentCount() > 0)
135 {
136 ops.initDummyOp(desc.colorAttachmentCount(),
137 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
138 }
139
140 return getRenderPassWithOps(device, serial, desc, ops, renderPassOut);
141}
142
vk::Error RendererPassCacheDocDummy();  // (no-op forward declaration removed)
182
Jamie Madill49ac74b2017-12-21 14:42:33 -0500183// CommandBatch implementation.
184RendererVk::CommandBatch::CommandBatch()
185{
186}
187
188RendererVk::CommandBatch::~CommandBatch()
189{
190}
191
// Move constructor: takes over the other batch's command pool and fence wrappers;
// the serial (a value type) is copied.
RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{
}
196
// Move assignment implemented as a swap: this batch's previous resources are
// exchanged into |other| rather than destroyed here (no device handle available).
RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}
204
Jamie Madill9f2a8612017-11-30 12:43:09 -0500205// RendererVk implementation.
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      // max() marks "no queue family selected yet"; set in initializeDevice().
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      // Generate two serials up front so mCurrentQueueSerial is always strictly
      // newer than mLastCompletedQueueSerial.
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
221
RendererVk::~RendererVk()
{
    // Drain pending GPU work first so in-flight command batches and queued
    // garbage can be destroyed safely below.
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        vk::Error error = finish(nullptr);
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    for (auto &descriptorSetLayout : mGraphicsDescriptorSetLayouts)
    {
        descriptorSetLayout.destroy(mDevice);
    }

    mGraphicsPipelineLayout.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    // The device must be destroyed before the debug callback and instance.
    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        // The EXT entry point is not exported by the loader; fetch it dynamically.
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device is owned by the instance; just drop the handle.
    mPhysicalDevice = VK_NULL_HANDLE;
}
277
Frank Henigman29f148b2016-11-23 21:05:36 -0500278vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500279{
Jamie Madill222c5172017-07-19 16:15:42 -0400280 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500281
282 // If we're loading the validation layers, we could be running from any random directory.
283 // Change to the executable directory so we can find the layers, then change back to the
284 // previous directory to be safe we don't disrupt the application.
285 std::string previousCWD;
286
287 if (mEnableValidationLayers)
288 {
289 const auto &cwd = angle::GetCWD();
290 if (!cwd.valid())
291 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500292 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500293 mEnableValidationLayers = false;
294 }
295 else
296 {
297 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400298 const char *exeDir = angle::GetExecutableDirectory();
299 if (!angle::SetCWD(exeDir))
300 {
301 ERR() << "Error setting CWD for Vulkan layers init.";
302 mEnableValidationLayers = false;
303 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500304 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400305 }
306
307 // Override environment variable to use the ANGLE layers.
308 if (mEnableValidationLayers)
309 {
310 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
311 {
312 ERR() << "Error setting environment for Vulkan layers init.";
313 mEnableValidationLayers = false;
314 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500315 }
316
Jamie Madill0448ec82016-12-23 13:41:47 -0500317 // Gather global layer properties.
318 uint32_t instanceLayerCount = 0;
319 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
320
321 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
322 if (instanceLayerCount > 0)
323 {
324 ANGLE_VK_TRY(
325 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
326 }
327
Jamie Madille09bd5d2016-11-29 16:20:35 -0500328 uint32_t instanceExtensionCount = 0;
329 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
330
331 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
332 if (instanceExtensionCount > 0)
333 {
334 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
335 instanceExtensionProps.data()));
336 }
337
Jamie Madill0448ec82016-12-23 13:41:47 -0500338 if (mEnableValidationLayers)
339 {
340 // Verify the standard validation layers are available.
341 if (!HasStandardValidationLayer(instanceLayerProps))
342 {
343 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400344 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
345 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500346 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500347 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500348 }
349 else
350 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500351 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500352 }
353 mEnableValidationLayers = false;
354 }
355 }
356
Jamie Madille09bd5d2016-11-29 16:20:35 -0500357 std::vector<const char *> enabledInstanceExtensions;
358 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500359 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500360
Jamie Madill0448ec82016-12-23 13:41:47 -0500361 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
362 if (mEnableValidationLayers)
363 {
364 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
365 }
366
Jamie Madille09bd5d2016-11-29 16:20:35 -0500367 // Verify the required extensions are in the extension names set. Fail if not.
368 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
369
Jamie Madill327ba852016-11-30 12:38:28 -0500370 VkApplicationInfo applicationInfo;
371 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
372 applicationInfo.pNext = nullptr;
373 applicationInfo.pApplicationName = "ANGLE";
374 applicationInfo.applicationVersion = 1;
375 applicationInfo.pEngineName = "ANGLE";
376 applicationInfo.engineVersion = 1;
377 applicationInfo.apiVersion = VK_API_VERSION_1_0;
378
379 VkInstanceCreateInfo instanceInfo;
380 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
381 instanceInfo.pNext = nullptr;
382 instanceInfo.flags = 0;
383 instanceInfo.pApplicationInfo = &applicationInfo;
384
Jamie Madille09bd5d2016-11-29 16:20:35 -0500385 // Enable requested layers and extensions.
386 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
387 instanceInfo.ppEnabledExtensionNames =
388 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500389 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
390 instanceInfo.ppEnabledLayerNames =
391 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500392
393 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
394
Jamie Madill0448ec82016-12-23 13:41:47 -0500395 if (mEnableValidationLayers)
396 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500397 // Change back to the previous working directory now that we've loaded the instance -
398 // the validation layers should be loaded at this point.
399 angle::SetCWD(previousCWD.c_str());
400
Jamie Madill0448ec82016-12-23 13:41:47 -0500401 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
402
403 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
404 debugReportInfo.pNext = nullptr;
405 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
406 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
407 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
408 debugReportInfo.pfnCallback = &DebugReportCallback;
409 debugReportInfo.pUserData = this;
410
411 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
412 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
413 ASSERT(createDebugReportCallback);
414 ANGLE_VK_TRY(
415 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
416 }
417
Jamie Madill4d0bf552016-12-28 15:45:24 -0500418 uint32_t physicalDeviceCount = 0;
419 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
420 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
421
422 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
423 physicalDeviceCount = 1;
424 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
425
426 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
427
428 // Ensure we can find a graphics queue family.
429 uint32_t queueCount = 0;
430 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
431
432 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
433
434 mQueueFamilyProperties.resize(queueCount);
435 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
436 mQueueFamilyProperties.data());
437
438 size_t graphicsQueueFamilyCount = false;
439 uint32_t firstGraphicsQueueFamily = 0;
440 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
441 {
442 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
443 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
444 {
445 ASSERT(queueInfo.queueCount > 0);
446 graphicsQueueFamilyCount++;
447 if (firstGraphicsQueueFamily == 0)
448 {
449 firstGraphicsQueueFamily = familyIndex;
450 }
451 break;
452 }
453 }
454
455 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
456
457 // If only one queue family, go ahead and initialize the device. If there is more than one
458 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
459 if (graphicsQueueFamilyCount == 1)
460 {
461 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
462 }
463
Jamie Madill035fd6b2017-10-03 15:43:22 -0400464 // Store the physical device memory properties so we can find the right memory pools.
465 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500466
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500467 mGlslangWrapper = GlslangWrapper::GetReference();
468
Jamie Madill6a89d222017-11-02 11:59:51 -0400469 // Initialize the format table.
470 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);
471
Jamie Madill8c3988c2017-12-21 14:44:56 -0500472 // Initialize the pipeline layout for GL programs.
473 ANGLE_TRY(initGraphicsPipelineLayout());
474
Jamie Madill327ba852016-11-30 12:38:28 -0500475 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400476}
477
// Creates the logical device and fetches its queue for |queueFamilyIndex|, then
// creates the transient command pool used for per-frame command buffers.
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    // Device-level validation only stays on if the layer is actually present.
    if (mEnableValidationLayers)
    {
        if (!HasStandardValidationLayer(deviceLayerProps))
        {
            WARN() << "Vulkan standard validation layer is missing.";
            mEnableValidationLayers = false;
        }
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    VkDeviceQueueCreateInfo queueCreateInfo;

    // A single queue at priority 0 is requested from the chosen family.
    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                 = nullptr;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = mEnableValidationLayers ? 1u : 0u;
    createInfo.ppEnabledLayerNames =
        mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    // TRANSIENT flags the buffers as short-lived (reallocated each frame).
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext            = nullptr;
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    return vk::NoError();
}
559
560vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
561{
562 // We've already initialized a device, and can't re-create it unless it's never been used.
563 // TODO(jmadill): Handle the re-creation case if necessary.
564 if (mDevice != VK_NULL_HANDLE)
565 {
566 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
567
568 // Check if the current device supports present on this surface.
569 VkBool32 supportsPresent = VK_FALSE;
570 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
571 surface, &supportsPresent));
572
573 return (supportsPresent == VK_TRUE);
574 }
575
576 // Find a graphics and present queue.
577 Optional<uint32_t> newPresentQueue;
578 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
579 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
580 {
581 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
582 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
583 {
584 VkBool32 supportsPresent = VK_FALSE;
585 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
586 &supportsPresent));
587
588 if (supportsPresent == VK_TRUE)
589 {
590 newPresentQueue = queueIndex;
591 break;
592 }
593 }
594 }
595
596 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
597 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
598
599 return newPresentQueue.value();
600}
601
602std::string RendererVk::getVendorString() const
603{
604 switch (mPhysicalDeviceProperties.vendorID)
605 {
606 case VENDOR_ID_AMD:
607 return "Advanced Micro Devices";
608 case VENDOR_ID_NVIDIA:
609 return "NVIDIA";
610 case VENDOR_ID_INTEL:
611 return "Intel";
612 default:
613 {
614 // TODO(jmadill): More vendor IDs.
615 std::stringstream strstr;
616 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
617 return strstr.str();
618 }
619 }
620}
621
Jamie Madille09bd5d2016-11-29 16:20:35 -0500622std::string RendererVk::getRendererDescription() const
623{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500624 std::stringstream strstr;
625
626 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
627
628 strstr << "Vulkan ";
629 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
630 strstr << VK_VERSION_MINOR(apiVersion) << ".";
631 strstr << VK_VERSION_PATCH(apiVersion);
632
633 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
634
635 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500636}
637
Jamie Madillacccc6c2016-05-03 17:22:10 -0400638void RendererVk::ensureCapsInitialized() const
639{
640 if (!mCapsInitialized)
641 {
642 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
643 mCapsInitialized = true;
644 }
645}
646
void RendererVk::generateCaps(gl::Caps *outCaps,
                              gl::TextureCapsMap * /*outTextureCaps*/,
                              gl::Extensions *outExtensions,
                              gl::Limitations * /* outLimitations */) const
{
    // TODO(jmadill): Caps.
    // Conservative placeholder limits; real device limits are not queried yet.
    outCaps->maxDrawBuffers = 1;
    outCaps->maxVertexAttributes     = gl::MAX_VERTEX_ATTRIBS;
    outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
    outCaps->maxVaryingVectors               = 16;
    outCaps->maxTextureImageUnits            = 1;
    outCaps->maxCombinedTextureImageUnits    = 1;
    outCaps->max2DTextureSize                = 1024;
    outCaps->maxElementIndex  = std::numeric_limits<GLuint>::max() - 1;
    outCaps->maxFragmentUniformVectors = 8;
    outCaps->maxVertexUniformVectors   = 8;
    outCaps->maxColorAttachments = 1;

    // Enable this for simple buffer readback testing, but some functionality is missing.
    // TODO(jmadill): Support full mapBufferRange extension.
    outExtensions->mapBuffer = true;
    outExtensions->mapBufferRange = true;
}
670
const gl::Caps &RendererVk::getNativeCaps() const
{
    // Caps are generated lazily on first access.
    ensureCapsInitialized();
    return mNativeCaps;
}
676
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    // Caps are generated lazily on first access.
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
682
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    // Caps are generated lazily on first access.
    ensureCapsInitialized();
    return mNativeExtensions;
}
688
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    // Caps are generated lazily on first access.
    ensureCapsInitialized();
    return mNativeLimitations;
}
694
const vk::CommandPool &RendererVk::getCommandPool() const
{
    // Returns the pool for the current frame; submitFrame() retires it into an
    // in-flight batch and allocates a fresh one.
    return mCommandPool;
}
699
vk::Error RendererVk::finish(const gl::Context *context)
{
    // Submit any commands still recorded in the open command graph first.
    if (!mOpenCommandGraph.empty())
    {
        vk::CommandBuffer commandBatch;
        ANGLE_TRY(flushCommandGraph(context, &commandBatch));

        VkSubmitInfo submitInfo;
        submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext                = nullptr;
        submitInfo.waitSemaphoreCount   = 0;
        submitInfo.pWaitSemaphores      = nullptr;
        submitInfo.pWaitDstStageMask    = nullptr;
        submitInfo.commandBufferCount   = 1;
        submitInfo.pCommandBuffers      = commandBatch.ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores    = nullptr;

        ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
    }

    // Block until the queue is idle; afterwards every in-flight batch and piece
    // of garbage is safe to destroy immediately.
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
726
Jamie Madill0c0dc342017-03-24 14:18:51 -0400727void RendererVk::freeAllInFlightResources()
728{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500729 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill0c0dc342017-03-24 14:18:51 -0400730 {
Jamie Madill49ac74b2017-12-21 14:42:33 -0500731 batch.fence.destroy(mDevice);
732 batch.commandPool.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400733 }
734 mInFlightCommands.clear();
735
736 for (auto &garbage : mGarbage)
737 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400738 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400739 }
740 mGarbage.clear();
741}
742
vk::Error RendererVk::checkInFlightCommands()
{
    int finishedCount = 0;

    // Batches are stored in submission order, and a single queue signals fences in
    // that order, so stop at the first fence that isn't signaled yet.
    for (CommandBatch &batch : mInFlightCommands)
    {
        VkResult result = batch.fence.getStatus(mDevice);
        if (result == VK_NOT_READY)
            break;

        ANGLE_VK_TRY(result);
        // Serials advance monotonically with submissions.
        ASSERT(batch.serial > mLastCompletedQueueSerial);
        mLastCompletedQueueSerial = batch.serial;

        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
        ++finishedCount;
    }

    mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);

    // Garbage is queued in serial order as well: free the leading run whose
    // serials have completed, stopping at the first still-pending object.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
779
Jamie Madill49ac74b2017-12-21 14:42:33 -0500780vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo, vk::CommandBuffer &&commandBuffer)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500781{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500782 VkFenceCreateInfo fenceInfo;
783 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
784 fenceInfo.pNext = nullptr;
785 fenceInfo.flags = 0;
786
787 CommandBatch batch;
788 ANGLE_TRY(batch.fence.init(mDevice, fenceInfo));
789
790 ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));
Jamie Madill4c26fc22017-02-24 11:04:10 -0500791
792 // Store this command buffer in the in-flight list.
Jamie Madill49ac74b2017-12-21 14:42:33 -0500793 batch.commandPool = std::move(mCommandPool);
794 batch.serial = mCurrentQueueSerial;
Jamie Madill4c26fc22017-02-24 11:04:10 -0500795
Jamie Madill49ac74b2017-12-21 14:42:33 -0500796 mInFlightCommands.emplace_back(std::move(batch));
Jamie Madill0c0dc342017-03-24 14:18:51 -0400797
798 // Sanity check.
799 ASSERT(mInFlightCommands.size() < 1000u);
800
801 // Increment the queue serial. If this fails, we should restart ANGLE.
Jamie Madillfb05bcb2017-06-07 15:43:18 -0400802 // TODO(jmadill): Overflow check.
803 mCurrentQueueSerial = mQueueSerialFactory.generate();
Jamie Madill0c0dc342017-03-24 14:18:51 -0400804
805 ANGLE_TRY(checkInFlightCommands());
806
Jamie Madill49ac74b2017-12-21 14:42:33 -0500807 // Simply null out the command buffer here - it was allocated using the command pool.
808 commandBuffer.releaseHandle();
809
810 // Reallocate the command pool for next frame.
811 // TODO(jmadill): Consider reusing command pools.
812 VkCommandPoolCreateInfo poolInfo;
813 poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
814 poolInfo.pNext = nullptr;
815 poolInfo.flags = 0;
816 poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
817
818 mCommandPool.init(mDevice, poolInfo);
819
Jamie Madill4c26fc22017-02-24 11:04:10 -0500820 return vk::NoError();
821}
822
Jamie Madill5deea722017-02-16 10:44:46 -0500823vk::Error RendererVk::createStagingImage(TextureDimension dimension,
824 const vk::Format &format,
825 const gl::Extents &extent,
Jamie Madill035fd6b2017-10-03 15:43:22 -0400826 vk::StagingUsage usage,
Jamie Madill5deea722017-02-16 10:44:46 -0500827 vk::StagingImage *imageOut)
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500828{
Jamie Madill035fd6b2017-10-03 15:43:22 -0400829 ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
Jamie Madill1d7be502017-10-29 18:06:50 -0400830 format.vkTextureFormat, extent, usage));
Jamie Madill5deea722017-02-16 10:44:46 -0500831 return vk::NoError();
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500832}
833
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500834GlslangWrapper *RendererVk::getGlslangWrapper()
835{
836 return mGlslangWrapper;
837}
838
Jamie Madill4c26fc22017-02-24 11:04:10 -0500839Serial RendererVk::getCurrentQueueSerial() const
840{
841 return mCurrentQueueSerial;
842}
843
Jamie Madill97760352017-11-09 13:08:29 -0500844bool RendererVk::isResourceInUse(const ResourceVk &resource)
845{
846 return isSerialInUse(resource.getQueueSerial());
847}
848
849bool RendererVk::isSerialInUse(Serial serial)
850{
851 return serial > mLastCompletedQueueSerial;
852}
853
Jamie Madill9f2a8612017-11-30 12:43:09 -0500854vk::Error RendererVk::getCompatibleRenderPass(const vk::RenderPassDesc &desc,
855 vk::RenderPass **renderPassOut)
856{
857 return mRenderPassCache.getCompatibleRenderPass(mDevice, mCurrentQueueSerial, desc,
858 renderPassOut);
859}
860
Jamie Madillbef918c2017-12-13 13:11:30 -0500861vk::Error RendererVk::getRenderPassWithOps(const vk::RenderPassDesc &desc,
862 const vk::AttachmentOpsArray &ops,
863 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -0500864{
Jamie Madillbef918c2017-12-13 13:11:30 -0500865 return mRenderPassCache.getRenderPassWithOps(mDevice, mCurrentQueueSerial, desc, ops,
866 renderPassOut);
Jamie Madill9f2a8612017-11-30 12:43:09 -0500867}
868
Jamie Madill49ac74b2017-12-21 14:42:33 -0500869vk::CommandBufferNode *RendererVk::allocateCommandNode()
870{
871 // TODO(jmadill): Use a pool allocator for the CPU node allocations.
872 vk::CommandBufferNode *newCommands = new vk::CommandBufferNode();
873 mOpenCommandGraph.emplace_back(newCommands);
874 return newCommands;
875}
876
877vk::Error RendererVk::flushCommandGraph(const gl::Context *context, vk::CommandBuffer *commandBatch)
878{
879 VkCommandBufferAllocateInfo primaryInfo;
880 primaryInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
881 primaryInfo.pNext = nullptr;
882 primaryInfo.commandPool = mCommandPool.getHandle();
883 primaryInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
884 primaryInfo.commandBufferCount = 1;
885
886 ANGLE_TRY(commandBatch->init(mDevice, primaryInfo));
887
888 if (mOpenCommandGraph.empty())
889 {
890 return vk::NoError();
891 }
892
893 std::vector<vk::CommandBufferNode *> nodeStack;
894
895 VkCommandBufferBeginInfo beginInfo;
896 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
897 beginInfo.pNext = nullptr;
898 beginInfo.flags = 0;
899 beginInfo.pInheritanceInfo = nullptr;
900
901 ANGLE_TRY(commandBatch->begin(beginInfo));
902
903 for (vk::CommandBufferNode *topLevelNode : mOpenCommandGraph)
904 {
905 // Only process commands that don't have child commands. The others will be pulled in
906 // automatically. Also skip commands that have already been visited.
907 if (topLevelNode->isDependency() ||
908 topLevelNode->visitedState() != vk::VisitedState::Unvisited)
909 continue;
910
911 nodeStack.push_back(topLevelNode);
912
913 while (!nodeStack.empty())
914 {
915 vk::CommandBufferNode *node = nodeStack.back();
916
917 switch (node->visitedState())
918 {
919 case vk::VisitedState::Unvisited:
920 node->visitDependencies(&nodeStack);
921 break;
922 case vk::VisitedState::Ready:
923 ANGLE_TRY(node->visitAndExecute(this, commandBatch));
924 nodeStack.pop_back();
925 break;
926 case vk::VisitedState::Visited:
927 nodeStack.pop_back();
928 break;
929 default:
930 UNREACHABLE();
931 break;
932 }
933 }
934 }
935
936 ANGLE_TRY(commandBatch->end());
937 return vk::NoError();
938}
939
940void RendererVk::resetCommandGraph()
941{
942 // TODO(jmadill): Use pool allocation so we don't need to deallocate command graph.
943 for (vk::CommandBufferNode *node : mOpenCommandGraph)
944 {
945 delete node;
946 }
947 mOpenCommandGraph.clear();
948}
949
950vk::Error RendererVk::flush(const gl::Context *context,
951 const vk::Semaphore &waitSemaphore,
952 const vk::Semaphore &signalSemaphore)
953{
954 vk::CommandBuffer commandBatch;
955 ANGLE_TRY(flushCommandGraph(context, &commandBatch));
956
957 VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
958
959 VkSubmitInfo submitInfo;
960 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
961 submitInfo.pNext = nullptr;
962 submitInfo.waitSemaphoreCount = 1;
963 submitInfo.pWaitSemaphores = waitSemaphore.ptr();
964 submitInfo.pWaitDstStageMask = &waitStageMask;
965 submitInfo.commandBufferCount = 1;
966 submitInfo.pCommandBuffers = commandBatch.ptr();
967 submitInfo.signalSemaphoreCount = 1;
968 submitInfo.pSignalSemaphores = signalSemaphore.ptr();
969
970 ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
971 return vk::NoError();
972}
973
Jamie Madill8c3988c2017-12-21 14:44:56 -0500974const vk::PipelineLayout &RendererVk::getGraphicsPipelineLayout() const
975{
976 return mGraphicsPipelineLayout;
977}
978
979const std::vector<vk::DescriptorSetLayout> &RendererVk::getGraphicsDescriptorSetLayouts() const
980{
981 return mGraphicsDescriptorSetLayouts;
982}
983
984vk::Error RendererVk::initGraphicsPipelineLayout()
985{
986 ASSERT(!mGraphicsPipelineLayout.valid());
987
988 // Create two descriptor set layouts: one for default uniform info, and one for textures.
989 // Skip one or both if there are no uniforms.
990 VkDescriptorSetLayoutBinding uniformBindings[2];
991 uint32_t blockCount = 0;
992
993 {
994 auto &layoutBinding = uniformBindings[blockCount];
995
996 layoutBinding.binding = blockCount;
997 layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
998 layoutBinding.descriptorCount = 1;
999 layoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
1000 layoutBinding.pImmutableSamplers = nullptr;
1001
1002 blockCount++;
1003 }
1004
1005 {
1006 auto &layoutBinding = uniformBindings[blockCount];
1007
1008 layoutBinding.binding = blockCount;
1009 layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1010 layoutBinding.descriptorCount = 1;
1011 layoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
1012 layoutBinding.pImmutableSamplers = nullptr;
1013
1014 blockCount++;
1015 }
1016
1017 {
1018 VkDescriptorSetLayoutCreateInfo uniformInfo;
1019 uniformInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1020 uniformInfo.pNext = nullptr;
1021 uniformInfo.flags = 0;
1022 uniformInfo.bindingCount = blockCount;
1023 uniformInfo.pBindings = uniformBindings;
1024
1025 vk::DescriptorSetLayout uniformLayout;
1026 ANGLE_TRY(uniformLayout.init(mDevice, uniformInfo));
1027 mGraphicsDescriptorSetLayouts.push_back(std::move(uniformLayout));
1028 }
1029
1030 std::array<VkDescriptorSetLayoutBinding, gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES>
1031 textureBindings;
1032
1033 // TODO(jmadill): This approach might not work well for texture arrays.
1034 for (uint32_t textureIndex = 0; textureIndex < gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES;
1035 ++textureIndex)
1036 {
1037 VkDescriptorSetLayoutBinding &layoutBinding = textureBindings[textureIndex];
1038
1039 layoutBinding.binding = textureIndex;
1040 layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1041 layoutBinding.descriptorCount = 1;
1042 layoutBinding.stageFlags = (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
1043 layoutBinding.pImmutableSamplers = nullptr;
1044 }
1045
1046 {
1047 VkDescriptorSetLayoutCreateInfo textureInfo;
1048 textureInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1049 textureInfo.pNext = nullptr;
1050 textureInfo.flags = 0;
1051 textureInfo.bindingCount = static_cast<uint32_t>(textureBindings.size());
1052 textureInfo.pBindings = textureBindings.data();
1053
1054 vk::DescriptorSetLayout textureLayout;
1055 ANGLE_TRY(textureLayout.init(mDevice, textureInfo));
1056 mGraphicsDescriptorSetLayouts.push_back(std::move(textureLayout));
1057 }
1058
1059 VkPipelineLayoutCreateInfo createInfo;
1060 createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1061 createInfo.pNext = nullptr;
1062 createInfo.flags = 0;
1063 createInfo.setLayoutCount = static_cast<uint32_t>(mGraphicsDescriptorSetLayouts.size());
1064 createInfo.pSetLayouts = mGraphicsDescriptorSetLayouts[0].ptr();
1065 createInfo.pushConstantRangeCount = 0;
1066 createInfo.pPushConstantRanges = nullptr;
1067
1068 ANGLE_TRY(mGraphicsPipelineLayout.init(mDevice, createInfo));
1069
1070 return vk::NoError();
1071}
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001072} // namespace rx