//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/system_utils.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/vulkan/CommandBufferNode.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/TextureVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "platform/Platform.h"

namespace rx
{

namespace
{

VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
                                 const std::vector<const char *> &enabledExtensionNames)
{
    // Compile the extension names into a set.
    std::set<std::string> extensionNames;
    for (const auto &extensionProp : extensionProps)
    {
        extensionNames.insert(extensionProp.extensionName);
    }

    for (const char *extensionName : enabledExtensionNames)
    {
        if (extensionNames.count(extensionName) == 0)
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }

    return VK_SUCCESS;
}

VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}

// If we're loading the validation layers, we could be running from any random directory.
// Change to the executable directory so we can find the layers, then change back to the
// previous directory so we don't disrupt the application.
class ScopedVkLoaderEnvironment : angle::NonCopyable
{
  public:
    ScopedVkLoaderEnvironment(bool enableValidationLayers)
        : mEnableValidationLayers(enableValidationLayers), mChangedCWD(false)
    {
// Changing the CWD and setting environment variables makes no sense on Android,
// since this code is part of a Java application there.
// The Android Vulkan loader doesn't need this either.
#if !defined(ANGLE_PLATFORM_ANDROID)
        if (mEnableValidationLayers)
        {
            const auto &cwd = angle::GetCWD();
            if (!cwd.valid())
            {
                ERR() << "Error getting CWD for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
            else
            {
                mPreviousCWD = cwd.value();
                const char *exeDir = angle::GetExecutableDirectory();
                mChangedCWD = angle::SetCWD(exeDir);
                if (!mChangedCWD)
                {
                    ERR() << "Error setting CWD for Vulkan layers init.";
                    mEnableValidationLayers = false;
                }
            }
        }

        // Override environment variable to use the ANGLE layers.
        if (mEnableValidationLayers)
        {
            if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
            {
                ERR() << "Error setting environment for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
        }
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
    }

    ~ScopedVkLoaderEnvironment()
    {
        if (mChangedCWD)
        {
#if !defined(ANGLE_PLATFORM_ANDROID)
            ASSERT(mPreviousCWD.valid());
            angle::SetCWD(mPreviousCWD.value().c_str());
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
        }
    }

    bool canEnableValidationLayers() { return mEnableValidationLayers; }

  private:
    bool mEnableValidationLayers;
    bool mChangedCWD;
    Optional<std::string> mPreviousCWD;
};

}  // anonymous namespace

// CommandBatch implementation.
RendererVk::CommandBatch::CommandBatch()
{
}

RendererVk::CommandBatch::~CommandBatch()
{
}

RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{
}

RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}

// RendererVk implementation.
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}

RendererVk::~RendererVk()
{
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        vk::Error error = finish(nullptr);
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    for (auto &descriptorSetLayout : mGraphicsDescriptorSetLayouts)
    {
        descriptorSetLayout.destroy(mDevice);
    }

    mGraphicsPipelineLayout.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);
    mPipelineCache.destroy(mDevice);

    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mPhysicalDevice = VK_NULL_HANDLE;
}

vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
{
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(
            vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
    }

    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount = 0;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(
            instanceLayerProps, layersRequested, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiName);

    // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
    if (mEnableValidationLayers)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));

    VkApplicationInfo applicationInfo;
    applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pNext = nullptr;
    applicationInfo.pApplicationName = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName = "ANGLE";
    applicationInfo.engineVersion = 1;
    applicationInfo.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo instanceInfo;
    instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.pNext = nullptr;
    instanceInfo.flags = 0;
    instanceInfo.pApplicationInfo = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount = enabledLayerCount;
    instanceInfo.ppEnabledLayerNames = enabledLayerNames;

    ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));

    if (mEnableValidationLayers)
    {
        VkDebugReportCallbackCreateInfoEXT debugReportInfo;

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.pNext = nullptr;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData = this;

        auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
        ASSERT(createDebugReportCallback);
        ANGLE_VK_TRY(
            createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    physicalDeviceCount = 1;
    ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));

    vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount = 0;
    uint32_t firstGraphicsQueueFamily = 0;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            break;
        }
    }

    ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    mGlslangWrapper = GlslangWrapper::GetReference();

    // Initialize the format table.
    mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);

    // Initialize the pipeline layout for GL programs.
    ANGLE_TRY(initGraphicsPipelineLayout());

    return vk::NoError();
}

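// Creates the logical device and its single graphics queue for the chosen queue family,
// enabling validation layers when available and the swapchain device extension, then sets up
// a transient command pool tied to that queue family.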
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount = 0;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers = GetAvailableValidationLayers(
            deviceLayerProps, false, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    VkDeviceQueueCreateInfo queueCreateInfo;

    float zeroPriority = 0.0f;

    queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext = nullptr;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos = &queueCreateInfo;
    createInfo.enabledLayerCount = enabledLayerCount;
    createInfo.ppEnabledLayerNames = enabledLayerNames;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext = nullptr;
    commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    return vk::NoError();
}

vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        return (supportsPresent == VK_TRUE);
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
                                                              &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(newPresentQueue.value()));

    return newPresentQueue.value();
}

std::string RendererVk::getVendorString() const
{
    switch (mPhysicalDeviceProperties.vendorID)
    {
        case VENDOR_ID_AMD:
            return "Advanced Micro Devices";
        case VENDOR_ID_NVIDIA:
            return "NVIDIA";
        case VENDOR_ID_INTEL:
            return "Intel";
        default:
        {
            // TODO(jmadill): More vendor IDs.
            std::stringstream strstr;
            strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
            return strstr.str();
        }
    }
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";

    return strstr.str();
}

562{
563 if (!mCapsInitialized)
564 {
Luc Ferrone4741fd2018-01-25 13:25:27 -0500565 vk::GenerateCaps(mPhysicalDeviceProperties, &mNativeCaps, &mNativeTextureCaps,
566 &mNativeExtensions, &mNativeLimitations);
Jamie Madillacccc6c2016-05-03 17:22:10 -0400567 mCapsInitialized = true;
568 }
569}
570
Jamie Madillacccc6c2016-05-03 17:22:10 -0400571const gl::Caps &RendererVk::getNativeCaps() const
572{
573 ensureCapsInitialized();
574 return mNativeCaps;
575}
576
577const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
578{
579 ensureCapsInitialized();
580 return mNativeTextureCaps;
581}
582
583const gl::Extensions &RendererVk::getNativeExtensions() const
584{
585 ensureCapsInitialized();
586 return mNativeExtensions;
587}
588
589const gl::Limitations &RendererVk::getNativeLimitations() const
590{
591 ensureCapsInitialized();
592 return mNativeLimitations;
593}
594
Jamie Madill49ac74b2017-12-21 14:42:33 -0500595const vk::CommandPool &RendererVk::getCommandPool() const
Jamie Madill4d0bf552016-12-28 15:45:24 -0500596{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500597 return mCommandPool;
Jamie Madill4d0bf552016-12-28 15:45:24 -0500598}
599
vk::Error RendererVk::finish(const gl::Context *context)
{
    if (!mOpenCommandGraph.empty())
    {
        vk::CommandBuffer commandBatch;
        ANGLE_TRY(flushCommandGraph(context, &commandBatch));

        VkSubmitInfo submitInfo;
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext = nullptr;
        submitInfo.waitSemaphoreCount = 0;
        submitInfo.pWaitSemaphores = nullptr;
        submitInfo.pWaitDstStageMask = nullptr;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = commandBatch.ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;

        ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
    }

    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}

void RendererVk::freeAllInFlightResources()
{
    for (CommandBatch &batch : mInFlightCommands)
    {
        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
    }
    mInFlightCommands.clear();

    for (auto &garbage : mGarbage)
    {
        garbage.destroy(mDevice);
    }
    mGarbage.clear();
}

644{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500645 int finishedCount = 0;
Jamie Madillf651c772017-02-21 15:03:51 -0500646
Jamie Madill49ac74b2017-12-21 14:42:33 -0500647 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500648 {
Jamie Madill49ac74b2017-12-21 14:42:33 -0500649 VkResult result = batch.fence.getStatus(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400650 if (result == VK_NOT_READY)
651 break;
Jamie Madill49ac74b2017-12-21 14:42:33 -0500652
Jamie Madill0c0dc342017-03-24 14:18:51 -0400653 ANGLE_VK_TRY(result);
Jamie Madill49ac74b2017-12-21 14:42:33 -0500654 ASSERT(batch.serial > mLastCompletedQueueSerial);
655 mLastCompletedQueueSerial = batch.serial;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400656
Jamie Madill49ac74b2017-12-21 14:42:33 -0500657 batch.fence.destroy(mDevice);
658 batch.commandPool.destroy(mDevice);
659 ++finishedCount;
Jamie Madill4c26fc22017-02-24 11:04:10 -0500660 }
661
Jamie Madill49ac74b2017-12-21 14:42:33 -0500662 mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400663
664 size_t freeIndex = 0;
665 for (; freeIndex < mGarbage.size(); ++freeIndex)
666 {
Jamie Madill49ac74b2017-12-21 14:42:33 -0500667 if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
Jamie Madill0c0dc342017-03-24 14:18:51 -0400668 break;
669 }
670
671 // Remove the entries from the garbage list - they should be ready to go.
672 if (freeIndex > 0)
673 {
674 mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
Jamie Madillf651c772017-02-21 15:03:51 -0500675 }
676
Jamie Madill4c26fc22017-02-24 11:04:10 -0500677 return vk::NoError();
678}
679
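// Submits the given command buffer with a new fence, hands ownership of the current command pool
// to the in-flight batch, and then starts a fresh command pool and queue serial for the next
// frame's work.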
vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo, vk::CommandBuffer &&commandBuffer)
{
    VkFenceCreateInfo fenceInfo;
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.pNext = nullptr;
    fenceInfo.flags = 0;

    CommandBatch batch;
    ANGLE_TRY(batch.fence.init(mDevice, fenceInfo));

    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));

    // Store this command buffer in the in-flight list.
    batch.commandPool = std::move(mCommandPool);
    batch.serial = mCurrentQueueSerial;

    mInFlightCommands.emplace_back(std::move(batch));

    // Sanity check.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    ANGLE_TRY(checkInFlightCommands());

    // Simply null out the command buffer here - it was allocated using the command pool.
    commandBuffer.releaseHandle();

    // Reallocate the command pool for next frame.
    // TODO(jmadill): Consider reusing command pools.
    VkCommandPoolCreateInfo poolInfo;
    poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    poolInfo.pNext = nullptr;
    poolInfo.flags = 0;
    poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, poolInfo));

    return vk::NoError();
}

GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}

Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}

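// A resource is considered in use while its queue serial is newer than the last serial whose GPU
// work is known to have completed; such resources must not be destroyed or recycled yet.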
bool RendererVk::isResourceInUse(const ResourceVk &resource)
{
    return isSerialInUse(resource.getQueueSerial());
}

bool RendererVk::isSerialInUse(Serial serial)
{
    return serial > mLastCompletedQueueSerial;
}

vk::Error RendererVk::getCompatibleRenderPass(const vk::RenderPassDesc &desc,
                                              vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getCompatibleRenderPass(mDevice, mCurrentQueueSerial, desc,
                                                    renderPassOut);
}

vk::Error RendererVk::getRenderPassWithOps(const vk::RenderPassDesc &desc,
                                           const vk::AttachmentOpsArray &ops,
                                           vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getRenderPassWithOps(mDevice, mCurrentQueueSerial, desc, ops,
                                                 renderPassOut);
}

vk::CommandBufferNode *RendererVk::allocateCommandNode()
{
    // TODO(jmadill): Use a pool allocator for the CPU node allocations.
    vk::CommandBufferNode *newCommands = new vk::CommandBufferNode();
    mOpenCommandGraph.emplace_back(newCommands);
    return newCommands;
}

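// Linearizes the open command graph into the given primary command buffer. Nodes without
// happens-after dependencies act as roots; an explicit stack performs a depth-first visit so that
// each node's dependencies are recorded before the node itself, and nodes already visited through
// another root are skipped.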
vk::Error RendererVk::flushCommandGraph(const gl::Context *context, vk::CommandBuffer *commandBatch)
{
    VkCommandBufferAllocateInfo primaryInfo;
    primaryInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    primaryInfo.pNext = nullptr;
    primaryInfo.commandPool = mCommandPool.getHandle();
    primaryInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    primaryInfo.commandBufferCount = 1;

    ANGLE_TRY(commandBatch->init(mDevice, primaryInfo));

    if (mOpenCommandGraph.empty())
    {
        return vk::NoError();
    }

    std::vector<vk::CommandBufferNode *> nodeStack;

    VkCommandBufferBeginInfo beginInfo;
    beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.pNext = nullptr;
    beginInfo.flags = 0;
    beginInfo.pInheritanceInfo = nullptr;

    ANGLE_TRY(commandBatch->begin(beginInfo));

    for (vk::CommandBufferNode *topLevelNode : mOpenCommandGraph)
    {
        // Only process commands that don't have child commands. The others will be pulled in
        // automatically. Also skip commands that have already been visited.
        if (topLevelNode->hasHappensAfterDependencies() ||
            topLevelNode->visitedState() != vk::VisitedState::Unvisited)
            continue;

        nodeStack.push_back(topLevelNode);

        while (!nodeStack.empty())
        {
            vk::CommandBufferNode *node = nodeStack.back();

            switch (node->visitedState())
            {
                case vk::VisitedState::Unvisited:
                    node->visitDependencies(&nodeStack);
                    break;
                case vk::VisitedState::Ready:
                    ANGLE_TRY(node->visitAndExecute(this, commandBatch));
                    nodeStack.pop_back();
                    break;
                case vk::VisitedState::Visited:
                    nodeStack.pop_back();
                    break;
                default:
                    UNREACHABLE();
                    break;
            }
        }
    }

    ANGLE_TRY(commandBatch->end());
    resetCommandGraph();
    return vk::NoError();
}

void RendererVk::resetCommandGraph()
{
    // TODO(jmadill): Use pool allocation so we don't need to deallocate command graph.
    for (vk::CommandBufferNode *node : mOpenCommandGraph)
    {
        delete node;
    }
    mOpenCommandGraph.clear();
}

vk::Error RendererVk::flush(const gl::Context *context,
                            const vk::Semaphore &waitSemaphore,
                            const vk::Semaphore &signalSemaphore)
{
    vk::CommandBuffer commandBatch;
    ANGLE_TRY(flushCommandGraph(context, &commandBatch));

    VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    VkSubmitInfo submitInfo;
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = nullptr;
    submitInfo.waitSemaphoreCount = 1;
    submitInfo.pWaitSemaphores = waitSemaphore.ptr();
    submitInfo.pWaitDstStageMask = &waitStageMask;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = commandBatch.ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores = signalSemaphore.ptr();

    ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
    return vk::NoError();
}

const vk::PipelineLayout &RendererVk::getGraphicsPipelineLayout() const
{
    return mGraphicsPipelineLayout;
}

const std::vector<vk::DescriptorSetLayout> &RendererVk::getGraphicsDescriptorSetLayouts() const
{
    return mGraphicsDescriptorSetLayouts;
}

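// Builds the single pipeline layout shared by all GL programs: descriptor set 0 holds the default
// uniform blocks (one binding for the vertex stage, one for the fragment stage) and descriptor
// set 1 holds one combined image/sampler binding per supported texture unit.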
vk::Error RendererVk::initGraphicsPipelineLayout()
{
    ASSERT(!mGraphicsPipelineLayout.valid());

    // Create two descriptor set layouts: one for default uniform info, and one for textures.
    // Skip one or both if there are no uniforms.
    VkDescriptorSetLayoutBinding uniformBindings[2];
    uint32_t blockCount = 0;

    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    {
        VkDescriptorSetLayoutCreateInfo uniformInfo;
        uniformInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformInfo.pNext = nullptr;
        uniformInfo.flags = 0;
        uniformInfo.bindingCount = blockCount;
        uniformInfo.pBindings = uniformBindings;

        vk::DescriptorSetLayout uniformLayout;
        ANGLE_TRY(uniformLayout.init(mDevice, uniformInfo));
        mGraphicsDescriptorSetLayouts.push_back(std::move(uniformLayout));
    }

    // TODO(lucferron): expose this limitation to GL in Context Caps
    std::vector<VkDescriptorSetLayoutBinding> textureBindings(
        std::min<size_t>(mPhysicalDeviceProperties.limits.maxPerStageDescriptorSamplers,
                         gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES));

    // TODO(jmadill): This approach might not work well for texture arrays.
    for (uint32_t textureIndex = 0; textureIndex < textureBindings.size(); ++textureIndex)
    {
        VkDescriptorSetLayoutBinding &layoutBinding = textureBindings[textureIndex];

        layoutBinding.binding = textureIndex;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
        layoutBinding.pImmutableSamplers = nullptr;
    }

    {
        VkDescriptorSetLayoutCreateInfo textureInfo;
        textureInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        textureInfo.pNext = nullptr;
        textureInfo.flags = 0;
        textureInfo.bindingCount = static_cast<uint32_t>(textureBindings.size());
        textureInfo.pBindings = textureBindings.data();

        vk::DescriptorSetLayout textureLayout;
        ANGLE_TRY(textureLayout.init(mDevice, textureInfo));
        mGraphicsDescriptorSetLayouts.push_back(std::move(textureLayout));
    }

    VkPipelineLayoutCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.setLayoutCount = static_cast<uint32_t>(mGraphicsDescriptorSetLayouts.size());
    createInfo.pSetLayouts = mGraphicsDescriptorSetLayouts[0].ptr();
    createInfo.pushConstantRangeCount = 0;
    createInfo.pPushConstantRanges = nullptr;

    ANGLE_TRY(mGraphicsPipelineLayout.init(mDevice, createInfo));

    return vk::NoError();
}

Serial RendererVk::issueProgramSerial()
{
    return mProgramSerialFactory.generate();
}

vk::Error RendererVk::getPipeline(const ProgramVk *programVk,
                                  const vk::PipelineDesc &desc,
                                  const gl::AttributesMask &activeAttribLocationsMask,
                                  vk::PipelineAndSerial **pipelineOut)
{
    ASSERT(programVk->getVertexModuleSerial() == desc.getShaderStageInfo()[0].moduleSerial);
    ASSERT(programVk->getFragmentModuleSerial() == desc.getShaderStageInfo()[1].moduleSerial);

    // Pull in a compatible RenderPass.
    vk::RenderPass *compatibleRenderPass = nullptr;
    ANGLE_TRY(getCompatibleRenderPass(desc.getRenderPassDesc(), &compatibleRenderPass));

    return mPipelineCache.getPipeline(mDevice, *compatibleRenderPass, mGraphicsPipelineLayout,
                                      activeAttribLocationsMask, programVk->getLinkedVertexModule(),
                                      programVk->getLinkedFragmentModule(), desc, pipelineOut);
}

}  // namespace rx