blob: b83276de7e7c5868ebf43ce55dc4572031096a1c [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
Jamie Madill3c424b42018-01-19 12:35:09 -050013#include "libANGLE/renderer/vulkan/vk_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050014
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madill1f46bc12018-02-20 16:09:43 -050020#include "libANGLE/renderer/vulkan/CommandGraph.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050021#include "libANGLE/renderer/vulkan/CompilerVk.h"
22#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050023#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madillffa4cbb2018-01-23 13:04:07 -050024#include "libANGLE/renderer/vulkan/ProgramVk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050025#include "libANGLE/renderer/vulkan/TextureVk.h"
26#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Luc Ferrone4741fd2018-01-25 13:25:27 -050027#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
Jamie Madill3c424b42018-01-19 12:35:09 -050028#include "libANGLE/renderer/vulkan/vk_format_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050029#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040030
// Constants identifying the Vulkan Mock ICD ("null" software device). A physical device is
// treated as the mock device only when all three of these properties match what
// vkGetPhysicalDeviceProperties reports.
// Note: previously the two IDs were `const` while the name was `constexpr`; all three are now
// uniformly `constexpr` (compile-time constants, internal linkage).
namespace
{
constexpr uint32_t kMockVendorID = 0xba5eba11;
constexpr uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
}  // anonymous namespace
38
Jamie Madill9e54b5a2016-05-25 12:57:39 -040039namespace rx
40{
41
Jamie Madille09bd5d2016-11-29 16:20:35 -050042namespace
43{
// We currently only allocate 2 uniform buffers per descriptor set: one for the fragment shader
// and one for the vertex shader.
constexpr size_t kUniformBufferDescriptorsPerDescriptorSet = 2;
Jamie Madille09bd5d2016-11-29 16:20:35 -050047
48VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
49 const std::vector<const char *> &enabledExtensionNames)
50{
51 // Compile the extensions names into a set.
52 std::set<std::string> extensionNames;
53 for (const auto &extensionProp : extensionProps)
54 {
55 extensionNames.insert(extensionProp.extensionName);
56 }
57
Jamie Madillacf2f3a2017-11-21 19:22:44 -050058 for (const char *extensionName : enabledExtensionNames)
Jamie Madille09bd5d2016-11-29 16:20:35 -050059 {
60 if (extensionNames.count(extensionName) == 0)
61 {
62 return VK_ERROR_EXTENSION_NOT_PRESENT;
63 }
64 }
65
66 return VK_SUCCESS;
67}
68
// Callback registered with the VK_EXT_debug_report extension. Routes validation-layer
// messages into ANGLE's logging: error reports go to ERR() (and, in Debug builds, abort the
// offending Vulkan call by returning VK_TRUE), warnings go to WARN(), and all other report
// types are dropped. Returning VK_FALSE lets the triggering call proceed.
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}
98
Yuly Novikov199f4292018-01-19 19:04:05 -050099// If we're loading the validation layers, we could be running from any random directory.
100// Change to the executable directory so we can find the layers, then change back to the
101// previous directory to be safe we don't disrupt the application.
102class ScopedVkLoaderEnvironment : angle::NonCopyable
103{
104 public:
105 ScopedVkLoaderEnvironment(bool enableValidationLayers)
106 : mEnableValidationLayers(enableValidationLayers), mChangedCWD(false)
107 {
108// Changing CWD and setting environment variables makes no sense on Android,
109// since this code is a part of Java application there.
110// Android Vulkan loader doesn't need this either.
111#if !defined(ANGLE_PLATFORM_ANDROID)
112 if (mEnableValidationLayers)
113 {
114 const auto &cwd = angle::GetCWD();
115 if (!cwd.valid())
116 {
117 ERR() << "Error getting CWD for Vulkan layers init.";
118 mEnableValidationLayers = false;
119 }
120 else
121 {
122 mPreviousCWD = cwd.value();
123 const char *exeDir = angle::GetExecutableDirectory();
124 mChangedCWD = angle::SetCWD(exeDir);
125 if (!mChangedCWD)
126 {
127 ERR() << "Error setting CWD for Vulkan layers init.";
128 mEnableValidationLayers = false;
129 }
130 }
131 }
132
133 // Override environment variable to use the ANGLE layers.
134 if (mEnableValidationLayers)
135 {
Tobin Ehlisa3b220f2018-03-06 16:22:13 -0700136 if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_DATA_DIR))
Yuly Novikov199f4292018-01-19 19:04:05 -0500137 {
138 ERR() << "Error setting environment for Vulkan layers init.";
139 mEnableValidationLayers = false;
140 }
141 }
142#endif // !defined(ANGLE_PLATFORM_ANDROID)
143 }
144
145 ~ScopedVkLoaderEnvironment()
146 {
147 if (mChangedCWD)
148 {
149#if !defined(ANGLE_PLATFORM_ANDROID)
150 ASSERT(mPreviousCWD.valid());
151 angle::SetCWD(mPreviousCWD.value().c_str());
152#endif // !defined(ANGLE_PLATFORM_ANDROID)
153 }
154 }
155
156 bool canEnableValidationLayers() { return mEnableValidationLayers; }
157
158 private:
159 bool mEnableValidationLayers;
160 bool mChangedCWD;
161 Optional<std::string> mPreviousCWD;
162};
163
Jamie Madille09bd5d2016-11-29 16:20:35 -0500164} // anonymous namespace
165
Jamie Madill49ac74b2017-12-21 14:42:33 -0500166// CommandBatch implementation.
167RendererVk::CommandBatch::CommandBatch()
168{
169}
170
171RendererVk::CommandBatch::~CommandBatch()
172{
173}
174
// Move constructor: takes over the pool and fence handles; the serial is copied.
RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{
}
179
// Move assignment via the swap idiom: |other| ends up holding this batch's previous
// resources, so they are released when |other| is destroyed.
RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}
187
// RendererVk implementation.
// Constructs the renderer with all Vulkan handles null; real setup happens in initialize().
RendererVk::RendererVk()
    : mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      // Sentinel: no queue family chosen until initializeDevice() runs.
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mGlslangWrapper(nullptr),
      // Seed the "last completed" and "current" queue serials with distinct values.
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mInFlightCommands()
{
}
204
// Tears down all Vulkan state. Order matters: pending GPU work is drained first, then
// device-owned objects are destroyed, then the device, the debug callback, and finally
// the instance.
RendererVk::~RendererVk()
{
    // Flush/wait for any outstanding work so in-flight batches and garbage can be freed.
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        vk::Error error = finish(nullptr);
        if (error.isError())
        {
            ERR() << "Error during VK shutdown: " << error;
        }
    }

    // Destroy device-owned objects before destroying mDevice itself.
    for (auto &descriptorSetLayout : mGraphicsDescriptorSetLayouts)
    {
        descriptorSetLayout.destroy(mDevice);
    }

    mGraphicsPipelineLayout.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);
    mPipelineCache.destroy(mDevice);

    // Release our reference on the shared GlslangWrapper singleton.
    if (mGlslangWrapper)
    {
        GlslangWrapper::ReleaseReference();
        mGlslangWrapper = nullptr;
    }

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    // The destroy function is an extension entry point; it must be fetched dynamically.
    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    // The physical device is owned by the instance; just drop the handle.
    mPhysicalDevice = VK_NULL_HANDLE;
}
261
Tobin Ehlisa3b220f2018-03-06 16:22:13 -0700262void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
263 bool preferMockICD,
264 VkPhysicalDevice *physicalDeviceOut,
265 VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
266{
267 ASSERT(!physicalDevices.empty());
268 if (preferMockICD)
269 {
270 for (const VkPhysicalDevice &physicalDevice : physicalDevices)
271 {
272 vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
273 if ((kMockVendorID == physicalDevicePropertiesOut->vendorID) &&
274 (kMockDeviceID == physicalDevicePropertiesOut->deviceID) &&
275 (strcmp(kMockDeviceName, physicalDevicePropertiesOut->deviceName) == 0))
276 {
277 *physicalDeviceOut = physicalDevice;
278 return;
279 }
280 }
281 WARN() << "Vulkan Mock Driver was requested but Mock Device was not found. Using default "
282 "physicalDevice instead.";
283 }
284
285 // Fall back to first device.
286 *physicalDeviceOut = physicalDevices[0];
287 vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
288}
289
Frank Henigman29f148b2016-11-23 21:05:36 -0500290vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500291{
Jamie Madilldf9ad2b2018-02-02 12:40:01 -0500292 ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs));
Yuly Novikov199f4292018-01-19 19:04:05 -0500293 mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
Jamie Madilla66779f2017-01-06 10:43:44 -0500294
Tobin Ehlisa3b220f2018-03-06 16:22:13 -0700295 bool enableNullDriver = false;
296#if !defined(ANGLE_PLATFORM_ANDROID)
297 // Mock ICD does not currently run on Android
298 enableNullDriver = (attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
299 EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE) ==
300 EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE);
301 if (enableNullDriver)
302 {
303 // Override environment variable to use built Mock ICD
304 // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
305 ANGLE_VK_CHECK(angle::SetEnvironmentVar(g_VkICDPathEnv, ANGLE_VK_ICD_JSON),
306 VK_ERROR_INITIALIZATION_FAILED);
307 }
308#endif // !defined(ANGLE_PLATFORM_ANDROID)
Jamie Madill0448ec82016-12-23 13:41:47 -0500309 // Gather global layer properties.
310 uint32_t instanceLayerCount = 0;
311 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
312
313 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
314 if (instanceLayerCount > 0)
315 {
316 ANGLE_VK_TRY(
317 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
318 }
319
Jamie Madille09bd5d2016-11-29 16:20:35 -0500320 uint32_t instanceExtensionCount = 0;
321 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
322
323 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
324 if (instanceExtensionCount > 0)
325 {
326 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
327 instanceExtensionProps.data()));
328 }
329
Yuly Novikov199f4292018-01-19 19:04:05 -0500330 const char *const *enabledLayerNames = nullptr;
331 uint32_t enabledLayerCount = 0;
Jamie Madill0448ec82016-12-23 13:41:47 -0500332 if (mEnableValidationLayers)
333 {
Yuly Novikov199f4292018-01-19 19:04:05 -0500334 bool layersRequested =
335 (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
336 mEnableValidationLayers = GetAvailableValidationLayers(
337 instanceLayerProps, layersRequested, &enabledLayerNames, &enabledLayerCount);
Jamie Madill0448ec82016-12-23 13:41:47 -0500338 }
339
Jamie Madille09bd5d2016-11-29 16:20:35 -0500340 std::vector<const char *> enabledInstanceExtensions;
341 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500342 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500343
Jamie Madill0448ec82016-12-23 13:41:47 -0500344 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
345 if (mEnableValidationLayers)
346 {
347 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
348 }
349
Jamie Madille09bd5d2016-11-29 16:20:35 -0500350 // Verify the required extensions are in the extension names set. Fail if not.
351 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
352
Jamie Madill327ba852016-11-30 12:38:28 -0500353 VkApplicationInfo applicationInfo;
354 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
355 applicationInfo.pNext = nullptr;
356 applicationInfo.pApplicationName = "ANGLE";
357 applicationInfo.applicationVersion = 1;
358 applicationInfo.pEngineName = "ANGLE";
359 applicationInfo.engineVersion = 1;
360 applicationInfo.apiVersion = VK_API_VERSION_1_0;
361
362 VkInstanceCreateInfo instanceInfo;
363 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
364 instanceInfo.pNext = nullptr;
365 instanceInfo.flags = 0;
366 instanceInfo.pApplicationInfo = &applicationInfo;
367
Jamie Madille09bd5d2016-11-29 16:20:35 -0500368 // Enable requested layers and extensions.
369 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
370 instanceInfo.ppEnabledExtensionNames =
371 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Yuly Novikov199f4292018-01-19 19:04:05 -0500372 instanceInfo.enabledLayerCount = enabledLayerCount;
373 instanceInfo.ppEnabledLayerNames = enabledLayerNames;
Jamie Madill327ba852016-11-30 12:38:28 -0500374
375 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
376
Jamie Madill0448ec82016-12-23 13:41:47 -0500377 if (mEnableValidationLayers)
378 {
379 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
380
381 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
382 debugReportInfo.pNext = nullptr;
383 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
384 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
385 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
386 debugReportInfo.pfnCallback = &DebugReportCallback;
387 debugReportInfo.pUserData = this;
388
389 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
390 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
391 ASSERT(createDebugReportCallback);
392 ANGLE_VK_TRY(
393 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
394 }
395
Jamie Madill4d0bf552016-12-28 15:45:24 -0500396 uint32_t physicalDeviceCount = 0;
397 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
398 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
399
400 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
Tobin Ehlisa3b220f2018-03-06 16:22:13 -0700401 std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
402 ANGLE_VK_TRY(
403 vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, physicalDevices.data()));
404 ChoosePhysicalDevice(physicalDevices, enableNullDriver, &mPhysicalDevice,
405 &mPhysicalDeviceProperties);
Jamie Madill4d0bf552016-12-28 15:45:24 -0500406
407 // Ensure we can find a graphics queue family.
408 uint32_t queueCount = 0;
409 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
410
411 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
412
413 mQueueFamilyProperties.resize(queueCount);
414 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
415 mQueueFamilyProperties.data());
416
417 size_t graphicsQueueFamilyCount = false;
418 uint32_t firstGraphicsQueueFamily = 0;
419 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
420 {
421 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
422 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
423 {
424 ASSERT(queueInfo.queueCount > 0);
425 graphicsQueueFamilyCount++;
426 if (firstGraphicsQueueFamily == 0)
427 {
428 firstGraphicsQueueFamily = familyIndex;
429 }
430 break;
431 }
432 }
433
434 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
435
436 // If only one queue family, go ahead and initialize the device. If there is more than one
437 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
438 if (graphicsQueueFamilyCount == 1)
439 {
440 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
441 }
442
Jamie Madill035fd6b2017-10-03 15:43:22 -0400443 // Store the physical device memory properties so we can find the right memory pools.
444 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500445
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500446 mGlslangWrapper = GlslangWrapper::GetReference();
447
Jamie Madill6a89d222017-11-02 11:59:51 -0400448 // Initialize the format table.
Luc Ferrond50537a2018-02-07 17:02:08 -0500449 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps,
450 &mNativeCaps.compressedTextureFormats);
Jamie Madill6a89d222017-11-02 11:59:51 -0400451
Jamie Madill8c3988c2017-12-21 14:44:56 -0500452 // Initialize the pipeline layout for GL programs.
453 ANGLE_TRY(initGraphicsPipelineLayout());
454
Jamie Madill327ba852016-11-30 12:38:28 -0500455 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400456}
457
// Creates the logical VkDevice on |queueFamilyIndex|, enabling the swapchain extension and
// (when active) the validation layers. Also fetches the queue handle and creates the
// transient command pool. Called from initialize() or selectPresentQueueForSurface().
vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
{
    // Gather device-level layer properties.
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                      deviceLayerProps.data()));
    }

    // Gather device-level extension properties.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                      &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
            mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount           = 0;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers = GetAvailableValidationLayers(
            deviceLayerProps, false, &enabledLayerNames, &enabledLayerCount);
    }

    // The swapchain extension is mandatory for presenting to surfaces.
    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Request a single queue from the chosen family.
    VkDeviceQueueCreateInfo queueCreateInfo;

    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.pNext            = nullptr;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo;

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.pNext                 = nullptr;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = enabledLayerCount;
    createInfo.ppEnabledLayerNames   = enabledLayerNames;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = nullptr;  // TODO(jmadill): features

    ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo;
    commandPoolInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.pNext            = nullptr;
    commandPoolInfo.flags            = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));

    return vk::NoError();
}
537
// Finds a queue family that supports both graphics and presenting to |surface|, creating the
// logical device on it if one has not been created yet.
vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        // NOTE(review): on this path the function returns the boolean result (0 or 1) as the
        // uint32_t, not mCurrentQueueFamilyIndex — confirm callers expect that when the
        // device already exists.
        return (supportsPresent == VK_TRUE);
    }

    // Find a graphics queue family that can also present to the surface.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
                                                              &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(newPresentQueue.value()));

    return newPresentQueue.value();
}
579
580std::string RendererVk::getVendorString() const
581{
582 switch (mPhysicalDeviceProperties.vendorID)
583 {
584 case VENDOR_ID_AMD:
585 return "Advanced Micro Devices";
586 case VENDOR_ID_NVIDIA:
587 return "NVIDIA";
588 case VENDOR_ID_INTEL:
589 return "Intel";
590 default:
591 {
592 // TODO(jmadill): More vendor IDs.
593 std::stringstream strstr;
594 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
595 return strstr.str();
596 }
597 }
598}
599
Jamie Madille09bd5d2016-11-29 16:20:35 -0500600std::string RendererVk::getRendererDescription() const
601{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500602 std::stringstream strstr;
603
604 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
605
606 strstr << "Vulkan ";
607 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
608 strstr << VK_VERSION_MINOR(apiVersion) << ".";
609 strstr << VK_VERSION_PATCH(apiVersion);
610
611 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
612
613 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500614}
615
Jamie Madillacccc6c2016-05-03 17:22:10 -0400616void RendererVk::ensureCapsInitialized() const
617{
618 if (!mCapsInitialized)
619 {
Luc Ferrond50537a2018-02-07 17:02:08 -0500620 vk::GenerateCaps(mPhysicalDeviceProperties, mNativeTextureCaps, &mNativeCaps,
Luc Ferrone4741fd2018-01-25 13:25:27 -0500621 &mNativeExtensions, &mNativeLimitations);
Jamie Madillacccc6c2016-05-03 17:22:10 -0400622 mCapsInitialized = true;
623 }
624}
625
// Returns the native GL caps, generating them on first use.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
631
// Returns the native texture caps, generating them on first use.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
637
// Returns the native GL extensions, generating them on first use.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
643
// Returns the native GL limitations, generating them on first use.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
649
// Maximum number of simultaneously active textures: the smaller of the Vulkan per-stage
// sampler limit and ANGLE's own implementation constant.
uint32_t RendererVk::getMaxActiveTextures()
{
    // TODO(lucferron): expose this limitation to GL in Context Caps
    return std::min<uint32_t>(mPhysicalDeviceProperties.limits.maxPerStageDescriptorSamplers,
                              gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES);
}
656
// Number of uniform-buffer descriptors allocated per descriptor set (vertex + fragment).
uint32_t RendererVk::getUniformBufferDescriptorCount()
{
    return kUniformBufferDescriptorsPerDescriptorSet;
}
661
// Accessor for the renderer's current (transient) command pool.
const vk::CommandPool &RendererVk::getCommandPool() const
{
    return mCommandPool;
}
666
// Flushes any pending command graph work, submits it, then blocks until the GPU queue is
// idle and frees every in-flight batch and garbage object.
vk::Error RendererVk::finish(const gl::Context *context)
{
    // Flush and submit outstanding recorded commands, if any.
    if (!mCommandGraph.empty())
    {
        vk::CommandBuffer commandBatch;
        ANGLE_TRY(flushCommandGraph(context, &commandBatch));

        VkSubmitInfo submitInfo;
        submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext                = nullptr;
        submitInfo.waitSemaphoreCount   = 0;
        submitInfo.pWaitSemaphores      = nullptr;
        submitInfo.pWaitDstStageMask    = nullptr;
        submitInfo.commandBufferCount   = 1;
        submitInfo.pCommandBuffers      = commandBatch.ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores    = nullptr;

        ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
    }

    // Wait for everything previously submitted, then reclaim all tracked resources.
    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();
    return vk::NoError();
}
693
Jamie Madill0c0dc342017-03-24 14:18:51 -0400694void RendererVk::freeAllInFlightResources()
695{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500696 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill0c0dc342017-03-24 14:18:51 -0400697 {
Jamie Madill49ac74b2017-12-21 14:42:33 -0500698 batch.fence.destroy(mDevice);
699 batch.commandPool.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400700 }
701 mInFlightCommands.clear();
702
703 for (auto &garbage : mGarbage)
704 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400705 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400706 }
707 mGarbage.clear();
708}
709
// Retires finished work: walks the in-flight batches in submission order, destroying each one
// whose fence has signaled and advancing mLastCompletedQueueSerial, then frees garbage whose
// serial has completed.
vk::Error RendererVk::checkInFlightCommands()
{
    int finishedCount = 0;

    for (CommandBatch &batch : mInFlightCommands)
    {
        VkResult result = batch.fence.getStatus(mDevice);
        // Stop at the first batch that hasn't finished; later batches were submitted later.
        if (result == VK_NOT_READY)
            break;

        ANGLE_VK_TRY(result);
        ASSERT(batch.serial > mLastCompletedQueueSerial);
        mLastCompletedQueueSerial = batch.serial;

        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
        ++finishedCount;
    }

    mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);

    // Free leading garbage entries whose serial has completed. Stopping at the first failure
    // assumes mGarbage is ordered by serial — TODO confirm at the insertion sites.
    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return vk::NoError();
}
746
Jamie Madill49ac74b2017-12-21 14:42:33 -0500747vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo, vk::CommandBuffer &&commandBuffer)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500748{
Jamie Madill49ac74b2017-12-21 14:42:33 -0500749 VkFenceCreateInfo fenceInfo;
750 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
751 fenceInfo.pNext = nullptr;
752 fenceInfo.flags = 0;
753
754 CommandBatch batch;
755 ANGLE_TRY(batch.fence.init(mDevice, fenceInfo));
756
757 ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));
Jamie Madill4c26fc22017-02-24 11:04:10 -0500758
759 // Store this command buffer in the in-flight list.
Jamie Madill49ac74b2017-12-21 14:42:33 -0500760 batch.commandPool = std::move(mCommandPool);
761 batch.serial = mCurrentQueueSerial;
Jamie Madill4c26fc22017-02-24 11:04:10 -0500762
Jamie Madill49ac74b2017-12-21 14:42:33 -0500763 mInFlightCommands.emplace_back(std::move(batch));
Jamie Madill0c0dc342017-03-24 14:18:51 -0400764
765 // Sanity check.
766 ASSERT(mInFlightCommands.size() < 1000u);
767
768 // Increment the queue serial. If this fails, we should restart ANGLE.
Jamie Madillfb05bcb2017-06-07 15:43:18 -0400769 // TODO(jmadill): Overflow check.
770 mCurrentQueueSerial = mQueueSerialFactory.generate();
Jamie Madill0c0dc342017-03-24 14:18:51 -0400771
772 ANGLE_TRY(checkInFlightCommands());
773
Jamie Madill49ac74b2017-12-21 14:42:33 -0500774 // Simply null out the command buffer here - it was allocated using the command pool.
775 commandBuffer.releaseHandle();
776
777 // Reallocate the command pool for next frame.
778 // TODO(jmadill): Consider reusing command pools.
779 VkCommandPoolCreateInfo poolInfo;
780 poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
781 poolInfo.pNext = nullptr;
782 poolInfo.flags = 0;
783 poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
784
785 mCommandPool.init(mDevice, poolInfo);
786
Jamie Madill4c26fc22017-02-24 11:04:10 -0500787 return vk::NoError();
788}
789
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500790GlslangWrapper *RendererVk::getGlslangWrapper()
791{
792 return mGlslangWrapper;
793}
794
Jamie Madill4c26fc22017-02-24 11:04:10 -0500795Serial RendererVk::getCurrentQueueSerial() const
796{
797 return mCurrentQueueSerial;
798}
799
Jamie Madill97760352017-11-09 13:08:29 -0500800bool RendererVk::isResourceInUse(const ResourceVk &resource)
801{
802 return isSerialInUse(resource.getQueueSerial());
803}
804
// Returns true if work tagged with |serial| may still be executing on the GPU.
// Serials at or below mLastCompletedQueueSerial belong to batches whose fences
// have already signaled, so only strictly newer serials are in use.
bool RendererVk::isSerialInUse(Serial serial)
{
    return serial > mLastCompletedQueueSerial;
}
809
Jamie Madill9f2a8612017-11-30 12:43:09 -0500810vk::Error RendererVk::getCompatibleRenderPass(const vk::RenderPassDesc &desc,
811 vk::RenderPass **renderPassOut)
812{
813 return mRenderPassCache.getCompatibleRenderPass(mDevice, mCurrentQueueSerial, desc,
814 renderPassOut);
815}
816
Jamie Madillbef918c2017-12-13 13:11:30 -0500817vk::Error RendererVk::getRenderPassWithOps(const vk::RenderPassDesc &desc,
818 const vk::AttachmentOpsArray &ops,
819 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -0500820{
Jamie Madillbef918c2017-12-13 13:11:30 -0500821 return mRenderPassCache.getRenderPassWithOps(mDevice, mCurrentQueueSerial, desc, ops,
822 renderPassOut);
Jamie Madill9f2a8612017-11-30 12:43:09 -0500823}
824
Jamie Madill1f46bc12018-02-20 16:09:43 -0500825vk::CommandGraphNode *RendererVk::allocateCommandNode()
Jamie Madill49ac74b2017-12-21 14:42:33 -0500826{
Jamie Madill1f46bc12018-02-20 16:09:43 -0500827 return mCommandGraph.allocateNode();
Jamie Madill49ac74b2017-12-21 14:42:33 -0500828}
829
// Linearizes the accumulated command graph into |commandBatch|, recording with
// the current command pool and tagging the work with the current queue serial.
vk::Error RendererVk::flushCommandGraph(const gl::Context *context, vk::CommandBuffer *commandBatch)
{
    return mCommandGraph.submitCommands(mDevice, mCurrentQueueSerial, &mRenderPassCache,
                                        &mCommandPool, commandBatch);
}
835
836vk::Error RendererVk::flush(const gl::Context *context,
837 const vk::Semaphore &waitSemaphore,
838 const vk::Semaphore &signalSemaphore)
839{
840 vk::CommandBuffer commandBatch;
841 ANGLE_TRY(flushCommandGraph(context, &commandBatch));
842
843 VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
844
845 VkSubmitInfo submitInfo;
846 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
847 submitInfo.pNext = nullptr;
848 submitInfo.waitSemaphoreCount = 1;
849 submitInfo.pWaitSemaphores = waitSemaphore.ptr();
850 submitInfo.pWaitDstStageMask = &waitStageMask;
851 submitInfo.commandBufferCount = 1;
852 submitInfo.pCommandBuffers = commandBatch.ptr();
853 submitInfo.signalSemaphoreCount = 1;
854 submitInfo.pSignalSemaphores = signalSemaphore.ptr();
855
856 ANGLE_TRY(submitFrame(submitInfo, std::move(commandBatch)));
857 return vk::NoError();
858}
859
Jamie Madill8c3988c2017-12-21 14:44:56 -0500860const vk::PipelineLayout &RendererVk::getGraphicsPipelineLayout() const
861{
862 return mGraphicsPipelineLayout;
863}
864
// Returns the descriptor set layouts backing the graphics pipeline layout:
// index 0 holds the uniform-buffer set, index 1 the texture set
// (see initGraphicsPipelineLayout).
const std::vector<vk::DescriptorSetLayout> &RendererVk::getGraphicsDescriptorSetLayouts() const
{
    return mGraphicsDescriptorSetLayouts;
}
869
// One-time creation of the pipeline layout shared by all graphics pipelines:
// set 0 = two dynamic uniform buffers (vertex + fragment default uniforms),
// set 1 = one combined image sampler per texture unit. Must run exactly once.
vk::Error RendererVk::initGraphicsPipelineLayout()
{
    ASSERT(!mGraphicsPipelineLayout.valid());

    // Create two descriptor set layouts: one for default uniform info, and one for textures.
    // Skip one or both if there are no uniforms.
    // NOTE(review): despite the comment above, nothing is ever skipped - both
    // uniform bindings are created unconditionally below.
    VkDescriptorSetLayoutBinding uniformBindings[2];
    uint32_t blockCount = 0;

    // Binding 0: vertex-stage default uniform block.
    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    // Binding 1: fragment-stage default uniform block.
    {
        auto &layoutBinding = uniformBindings[blockCount];

        layoutBinding.binding = blockCount;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
        layoutBinding.pImmutableSamplers = nullptr;

        blockCount++;
    }

    // Set 0: the uniform-buffer descriptor set layout.
    {
        VkDescriptorSetLayoutCreateInfo uniformInfo;
        uniformInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        uniformInfo.pNext = nullptr;
        uniformInfo.flags = 0;
        uniformInfo.bindingCount = blockCount;
        uniformInfo.pBindings = uniformBindings;

        vk::DescriptorSetLayout uniformLayout;
        ANGLE_TRY(uniformLayout.init(mDevice, uniformInfo));
        mGraphicsDescriptorSetLayouts.push_back(std::move(uniformLayout));
    }

    // Set 1: one sampler binding per supported texture unit, visible to both stages.
    std::vector<VkDescriptorSetLayoutBinding> textureBindings(getMaxActiveTextures());

    // TODO(jmadill): This approach might not work well for texture arrays.
    for (uint32_t textureIndex = 0; textureIndex < textureBindings.size(); ++textureIndex)
    {
        VkDescriptorSetLayoutBinding &layoutBinding = textureBindings[textureIndex];

        layoutBinding.binding = textureIndex;
        layoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        layoutBinding.descriptorCount = 1;
        layoutBinding.stageFlags = (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
        layoutBinding.pImmutableSamplers = nullptr;
    }

    {
        VkDescriptorSetLayoutCreateInfo textureInfo;
        textureInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        textureInfo.pNext = nullptr;
        textureInfo.flags = 0;
        textureInfo.bindingCount = static_cast<uint32_t>(textureBindings.size());
        textureInfo.pBindings = textureBindings.data();

        vk::DescriptorSetLayout textureLayout;
        ANGLE_TRY(textureLayout.init(mDevice, textureInfo));
        mGraphicsDescriptorSetLayouts.push_back(std::move(textureLayout));
    }

    // Finally build the pipeline layout over both sets.
    VkPipelineLayoutCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.setLayoutCount = static_cast<uint32_t>(mGraphicsDescriptorSetLayouts.size());
    // NOTE(review): treats the vector of wrapper objects as a contiguous array of
    // VkDescriptorSetLayout handles - assumes the wrapper holds exactly one handle
    // and nothing else; verify against vk::DescriptorSetLayout's definition.
    createInfo.pSetLayouts = mGraphicsDescriptorSetLayouts[0].ptr();
    createInfo.pushConstantRangeCount = 0;
    createInfo.pPushConstantRanges = nullptr;

    ANGLE_TRY(mGraphicsPipelineLayout.init(mDevice, createInfo));

    return vk::NoError();
}
Jamie Madillf2f6d372018-01-10 21:37:23 -0500956
// Generates a fresh serial used to identify program objects and their linked
// shader modules (compared in getPipeline's ASSERTs).
Serial RendererVk::issueProgramSerial()
{
    return mProgramSerialFactory.generate();
}
961
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500962vk::Error RendererVk::getPipeline(const ProgramVk *programVk,
963 const vk::PipelineDesc &desc,
Luc Ferronceb71902018-02-05 15:18:47 -0500964 const gl::AttributesMask &activeAttribLocationsMask,
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500965 vk::PipelineAndSerial **pipelineOut)
966{
967 ASSERT(programVk->getVertexModuleSerial() == desc.getShaderStageInfo()[0].moduleSerial);
968 ASSERT(programVk->getFragmentModuleSerial() == desc.getShaderStageInfo()[1].moduleSerial);
969
970 // Pull in a compatible RenderPass.
971 vk::RenderPass *compatibleRenderPass = nullptr;
972 ANGLE_TRY(getCompatibleRenderPass(desc.getRenderPassDesc(), &compatibleRenderPass));
973
974 return mPipelineCache.getPipeline(mDevice, *compatibleRenderPass, mGraphicsPipelineLayout,
Luc Ferronceb71902018-02-05 15:18:47 -0500975 activeAttribLocationsMask, programVk->getLinkedVertexModule(),
Jamie Madillffa4cbb2018-01-23 13:04:07 -0500976 programVk->getLinkedFragmentModule(), desc, pipelineOut);
977}
978
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400979} // namespace rx