blob: 15edbffa161b5f8410c9d6dffa373d92f78ff191 [file] [log] [blame]
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001//
2// Copyright 2016 The ANGLE Project Authors. All rights reserved.
3// Use of this source code is governed by a BSD-style license that can be
4// found in the LICENSE file.
5//
6// RendererVk.cpp:
7// Implements the class methods for RendererVk.
8//
9
10#include "libANGLE/renderer/vulkan/RendererVk.h"
11
Jamie Madill4d0bf552016-12-28 15:45:24 -050012// Placing this first seems to solve an intellisense bug.
13#include "libANGLE/renderer/vulkan/renderervk_utils.h"
14
Jamie Madille09bd5d2016-11-29 16:20:35 -050015#include <EGL/eglext.h>
16
Jamie Madill9e54b5a2016-05-25 12:57:39 -040017#include "common/debug.h"
Jamie Madilla66779f2017-01-06 10:43:44 -050018#include "common/system_utils.h"
Jamie Madill4d0bf552016-12-28 15:45:24 -050019#include "libANGLE/renderer/driver_utils.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050020#include "libANGLE/renderer/vulkan/CompilerVk.h"
21#include "libANGLE/renderer/vulkan/FramebufferVk.h"
Jamie Madill8ecf7f92017-01-13 17:29:52 -050022#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050023#include "libANGLE/renderer/vulkan/TextureVk.h"
24#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
Jamie Madill7b57b9d2017-01-13 09:33:38 -050025#include "libANGLE/renderer/vulkan/formatutilsvk.h"
Jamie Madille09bd5d2016-11-29 16:20:35 -050026#include "platform/Platform.h"
Jamie Madill9e54b5a2016-05-25 12:57:39 -040027
28namespace rx
29{
30
Jamie Madille09bd5d2016-11-29 16:20:35 -050031namespace
32{
33
34VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
35 const std::vector<const char *> &enabledExtensionNames)
36{
37 // Compile the extensions names into a set.
38 std::set<std::string> extensionNames;
39 for (const auto &extensionProp : extensionProps)
40 {
41 extensionNames.insert(extensionProp.extensionName);
42 }
43
Jamie Madillacf2f3a2017-11-21 19:22:44 -050044 for (const char *extensionName : enabledExtensionNames)
Jamie Madille09bd5d2016-11-29 16:20:35 -050045 {
46 if (extensionNames.count(extensionName) == 0)
47 {
48 return VK_ERROR_EXTENSION_NOT_PRESENT;
49 }
50 }
51
52 return VK_SUCCESS;
53}
54
Jamie Madill0448ec82016-12-23 13:41:47 -050055VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
56 VkDebugReportObjectTypeEXT objectType,
57 uint64_t object,
58 size_t location,
59 int32_t messageCode,
60 const char *layerPrefix,
61 const char *message,
62 void *userData)
63{
64 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
65 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050066 ERR() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050067#if !defined(NDEBUG)
68 // Abort the call in Debug builds.
69 return VK_TRUE;
70#endif
71 }
72 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
73 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050074 WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050075 }
76 else
77 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -050078 // Uncomment this if you want Vulkan spam.
79 // WARN() << message;
Jamie Madill0448ec82016-12-23 13:41:47 -050080 }
81
82 return VK_FALSE;
83}
84
Jamie Madille09bd5d2016-11-29 16:20:35 -050085} // anonymous namespace
86
Jamie Madill9f2a8612017-11-30 12:43:09 -050087// RenderPassCache implementation.
88RenderPassCache::RenderPassCache()
89{
90}
91
92RenderPassCache::~RenderPassCache()
93{
94 ASSERT(mPayload.empty());
95}
96
97void RenderPassCache::destroy(VkDevice device)
98{
Jamie Madillbef918c2017-12-13 13:11:30 -050099 for (auto &outerIt : mPayload)
Jamie Madill9f2a8612017-11-30 12:43:09 -0500100 {
Jamie Madillbef918c2017-12-13 13:11:30 -0500101 for (auto &innerIt : outerIt.second)
102 {
103 innerIt.second.get().destroy(device);
104 }
Jamie Madill9f2a8612017-11-30 12:43:09 -0500105 }
106 mPayload.clear();
107}
108
109vk::Error RenderPassCache::getCompatibleRenderPass(VkDevice device,
110 Serial serial,
111 const vk::RenderPassDesc &desc,
112 vk::RenderPass **renderPassOut)
113{
Jamie Madillbef918c2017-12-13 13:11:30 -0500114 auto outerIt = mPayload.find(desc);
115 if (outerIt != mPayload.end())
Jamie Madill9f2a8612017-11-30 12:43:09 -0500116 {
Jamie Madillbef918c2017-12-13 13:11:30 -0500117 InnerCache &innerCache = outerIt->second;
118 ASSERT(!innerCache.empty());
Jamie Madill9f2a8612017-11-30 12:43:09 -0500119
Jamie Madillbef918c2017-12-13 13:11:30 -0500120 // Find the first element and return it.
121 *renderPassOut = &innerCache.begin()->second.get();
Jamie Madill9f2a8612017-11-30 12:43:09 -0500122 return vk::NoError();
123 }
124
Jamie Madillbef918c2017-12-13 13:11:30 -0500125 // Insert some dummy attachment ops.
126 // TODO(jmadill): Pre-populate the cache in the Renderer so we rarely miss here.
127 vk::AttachmentOpsArray ops;
128 for (uint32_t colorIndex = 0; colorIndex < desc.colorAttachmentCount(); ++colorIndex)
129 {
130 ops.initDummyOp(colorIndex, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
131 }
132
133 if (desc.depthStencilAttachmentCount() > 0)
134 {
135 ops.initDummyOp(desc.colorAttachmentCount(),
136 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
137 }
138
139 return getRenderPassWithOps(device, serial, desc, ops, renderPassOut);
140}
141
142vk::Error RenderPassCache::getRenderPassWithOps(VkDevice device,
143 Serial serial,
144 const vk::RenderPassDesc &desc,
145 const vk::AttachmentOpsArray &attachmentOps,
146 vk::RenderPass **renderPassOut)
147{
148 auto outerIt = mPayload.find(desc);
149 if (outerIt != mPayload.end())
150 {
151 InnerCache &innerCache = outerIt->second;
152
153 auto innerIt = innerCache.find(attachmentOps);
154 if (innerIt != innerCache.end())
155 {
156 // Update the serial before we return.
157 // TODO(jmadill): Could possibly use an MRU cache here.
158 innerIt->second.updateSerial(serial);
159 *renderPassOut = &innerIt->second.get();
160 return vk::NoError();
161 }
162 }
163 else
164 {
165 auto emplaceResult = mPayload.emplace(desc, InnerCache());
166 outerIt = emplaceResult.first;
167 }
168
Jamie Madill9f2a8612017-11-30 12:43:09 -0500169 vk::RenderPass newRenderPass;
Jamie Madillbef918c2017-12-13 13:11:30 -0500170 ANGLE_TRY(vk::InitializeRenderPassFromDesc(device, desc, attachmentOps, &newRenderPass));
Jamie Madill9f2a8612017-11-30 12:43:09 -0500171
172 vk::RenderPassAndSerial withSerial(std::move(newRenderPass), serial);
173
Jamie Madillbef918c2017-12-13 13:11:30 -0500174 InnerCache &innerCache = outerIt->second;
175 auto insertPos = innerCache.emplace(attachmentOps, std::move(withSerial));
Jamie Madill9f2a8612017-11-30 12:43:09 -0500176 *renderPassOut = &insertPos.first->second.get();
177
178 // TODO(jmadill): Trim cache, and pre-populate with the most common RPs on startup.
179 return vk::NoError();
180}
181
182// RendererVk implementation.
Jamie Madill0448ec82016-12-23 13:41:47 -0500183RendererVk::RendererVk()
184 : mCapsInitialized(false),
185 mInstance(VK_NULL_HANDLE),
186 mEnableValidationLayers(false),
Jamie Madill4d0bf552016-12-28 15:45:24 -0500187 mDebugReportCallback(VK_NULL_HANDLE),
188 mPhysicalDevice(VK_NULL_HANDLE),
189 mQueue(VK_NULL_HANDLE),
190 mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
191 mDevice(VK_NULL_HANDLE),
Jamie Madill4c26fc22017-02-24 11:04:10 -0500192 mGlslangWrapper(nullptr),
Jamie Madillfb05bcb2017-06-07 15:43:18 -0400193 mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
194 mCurrentQueueSerial(mQueueSerialFactory.generate()),
Jamie Madill1b038242017-11-01 15:14:36 -0400195 mInFlightCommands(),
196 mCurrentRenderPassFramebuffer(nullptr)
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400197{
198}
199
200RendererVk::~RendererVk()
201{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400202 if (!mInFlightCommands.empty() || !mInFlightFences.empty() || !mGarbage.empty())
Jamie Madill4c26fc22017-02-24 11:04:10 -0500203 {
204 vk::Error error = finish();
205 if (error.isError())
206 {
207 ERR() << "Error during VK shutdown: " << error;
208 }
209 }
210
Jamie Madill9f2a8612017-11-30 12:43:09 -0500211 mRenderPassCache.destroy(mDevice);
212
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500213 if (mGlslangWrapper)
214 {
215 GlslangWrapper::ReleaseReference();
216 mGlslangWrapper = nullptr;
217 }
218
Jamie Madill5deea722017-02-16 10:44:46 -0500219 if (mCommandBuffer.valid())
220 {
Jamie Madill7f738d42017-11-20 17:06:27 -0500221 mCommandBuffer.destroy(mDevice, mCommandPool);
Jamie Madill5deea722017-02-16 10:44:46 -0500222 }
223
224 if (mCommandPool.valid())
225 {
226 mCommandPool.destroy(mDevice);
227 }
Jamie Madill4d0bf552016-12-28 15:45:24 -0500228
229 if (mDevice)
230 {
231 vkDestroyDevice(mDevice, nullptr);
232 mDevice = VK_NULL_HANDLE;
233 }
234
Jamie Madill0448ec82016-12-23 13:41:47 -0500235 if (mDebugReportCallback)
236 {
237 ASSERT(mInstance);
238 auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
239 vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
240 ASSERT(destroyDebugReportCallback);
241 destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
242 }
243
Jamie Madill4d0bf552016-12-28 15:45:24 -0500244 if (mInstance)
245 {
246 vkDestroyInstance(mInstance, nullptr);
247 mInstance = VK_NULL_HANDLE;
248 }
249
250 mPhysicalDevice = VK_NULL_HANDLE;
Jamie Madill327ba852016-11-30 12:38:28 -0500251}
252
Frank Henigman29f148b2016-11-23 21:05:36 -0500253vk::Error RendererVk::initialize(const egl::AttributeMap &attribs, const char *wsiName)
Jamie Madill327ba852016-11-30 12:38:28 -0500254{
Jamie Madill222c5172017-07-19 16:15:42 -0400255 mEnableValidationLayers = ShouldUseDebugLayers(attribs);
Jamie Madilla66779f2017-01-06 10:43:44 -0500256
257 // If we're loading the validation layers, we could be running from any random directory.
258 // Change to the executable directory so we can find the layers, then change back to the
259 // previous directory to be safe we don't disrupt the application.
260 std::string previousCWD;
261
262 if (mEnableValidationLayers)
263 {
264 const auto &cwd = angle::GetCWD();
265 if (!cwd.valid())
266 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500267 ERR() << "Error getting CWD for Vulkan layers init.";
Jamie Madilla66779f2017-01-06 10:43:44 -0500268 mEnableValidationLayers = false;
269 }
270 else
271 {
272 previousCWD = cwd.value();
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400273 const char *exeDir = angle::GetExecutableDirectory();
274 if (!angle::SetCWD(exeDir))
275 {
276 ERR() << "Error setting CWD for Vulkan layers init.";
277 mEnableValidationLayers = false;
278 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500279 }
Jamie Madillb8bbbf92017-09-19 00:24:59 -0400280 }
281
282 // Override environment variable to use the ANGLE layers.
283 if (mEnableValidationLayers)
284 {
285 if (!angle::SetEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
286 {
287 ERR() << "Error setting environment for Vulkan layers init.";
288 mEnableValidationLayers = false;
289 }
Jamie Madilla66779f2017-01-06 10:43:44 -0500290 }
291
Jamie Madill0448ec82016-12-23 13:41:47 -0500292 // Gather global layer properties.
293 uint32_t instanceLayerCount = 0;
294 ANGLE_VK_TRY(vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
295
296 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
297 if (instanceLayerCount > 0)
298 {
299 ANGLE_VK_TRY(
300 vkEnumerateInstanceLayerProperties(&instanceLayerCount, instanceLayerProps.data()));
301 }
302
Jamie Madille09bd5d2016-11-29 16:20:35 -0500303 uint32_t instanceExtensionCount = 0;
304 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
305
306 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
307 if (instanceExtensionCount > 0)
308 {
309 ANGLE_VK_TRY(vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
310 instanceExtensionProps.data()));
311 }
312
Jamie Madill0448ec82016-12-23 13:41:47 -0500313 if (mEnableValidationLayers)
314 {
315 // Verify the standard validation layers are available.
316 if (!HasStandardValidationLayer(instanceLayerProps))
317 {
318 // Generate an error if the attribute was requested, warning otherwise.
Jamie Madill222c5172017-07-19 16:15:42 -0400319 if (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) ==
320 EGL_TRUE)
Jamie Madill0448ec82016-12-23 13:41:47 -0500321 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500322 ERR() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500323 }
324 else
325 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500326 WARN() << "Vulkan standard validation layers are missing.";
Jamie Madill0448ec82016-12-23 13:41:47 -0500327 }
328 mEnableValidationLayers = false;
329 }
330 }
331
Jamie Madille09bd5d2016-11-29 16:20:35 -0500332 std::vector<const char *> enabledInstanceExtensions;
333 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
Frank Henigman29f148b2016-11-23 21:05:36 -0500334 enabledInstanceExtensions.push_back(wsiName);
Jamie Madille09bd5d2016-11-29 16:20:35 -0500335
Jamie Madill0448ec82016-12-23 13:41:47 -0500336 // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
337 if (mEnableValidationLayers)
338 {
339 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
340 }
341
Jamie Madille09bd5d2016-11-29 16:20:35 -0500342 // Verify the required extensions are in the extension names set. Fail if not.
343 ANGLE_VK_TRY(VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));
344
Jamie Madill327ba852016-11-30 12:38:28 -0500345 VkApplicationInfo applicationInfo;
346 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
347 applicationInfo.pNext = nullptr;
348 applicationInfo.pApplicationName = "ANGLE";
349 applicationInfo.applicationVersion = 1;
350 applicationInfo.pEngineName = "ANGLE";
351 applicationInfo.engineVersion = 1;
352 applicationInfo.apiVersion = VK_API_VERSION_1_0;
353
354 VkInstanceCreateInfo instanceInfo;
355 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
356 instanceInfo.pNext = nullptr;
357 instanceInfo.flags = 0;
358 instanceInfo.pApplicationInfo = &applicationInfo;
359
Jamie Madille09bd5d2016-11-29 16:20:35 -0500360 // Enable requested layers and extensions.
361 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
362 instanceInfo.ppEnabledExtensionNames =
363 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
Jamie Madill0448ec82016-12-23 13:41:47 -0500364 instanceInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
365 instanceInfo.ppEnabledLayerNames =
366 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
Jamie Madill327ba852016-11-30 12:38:28 -0500367
368 ANGLE_VK_TRY(vkCreateInstance(&instanceInfo, nullptr, &mInstance));
369
Jamie Madill0448ec82016-12-23 13:41:47 -0500370 if (mEnableValidationLayers)
371 {
Jamie Madilla66779f2017-01-06 10:43:44 -0500372 // Change back to the previous working directory now that we've loaded the instance -
373 // the validation layers should be loaded at this point.
374 angle::SetCWD(previousCWD.c_str());
375
Jamie Madill0448ec82016-12-23 13:41:47 -0500376 VkDebugReportCallbackCreateInfoEXT debugReportInfo;
377
378 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
379 debugReportInfo.pNext = nullptr;
380 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
381 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
382 VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
383 debugReportInfo.pfnCallback = &DebugReportCallback;
384 debugReportInfo.pUserData = this;
385
386 auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
387 vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
388 ASSERT(createDebugReportCallback);
389 ANGLE_VK_TRY(
390 createDebugReportCallback(mInstance, &debugReportInfo, nullptr, &mDebugReportCallback));
391 }
392
Jamie Madill4d0bf552016-12-28 15:45:24 -0500393 uint32_t physicalDeviceCount = 0;
394 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
395 ANGLE_VK_CHECK(physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
396
397 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
398 physicalDeviceCount = 1;
399 ANGLE_VK_TRY(vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, &mPhysicalDevice));
400
401 vkGetPhysicalDeviceProperties(mPhysicalDevice, &mPhysicalDeviceProperties);
402
403 // Ensure we can find a graphics queue family.
404 uint32_t queueCount = 0;
405 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
406
407 ANGLE_VK_CHECK(queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
408
409 mQueueFamilyProperties.resize(queueCount);
410 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
411 mQueueFamilyProperties.data());
412
413 size_t graphicsQueueFamilyCount = false;
414 uint32_t firstGraphicsQueueFamily = 0;
415 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
416 {
417 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
418 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
419 {
420 ASSERT(queueInfo.queueCount > 0);
421 graphicsQueueFamilyCount++;
422 if (firstGraphicsQueueFamily == 0)
423 {
424 firstGraphicsQueueFamily = familyIndex;
425 }
426 break;
427 }
428 }
429
430 ANGLE_VK_CHECK(graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
431
432 // If only one queue family, go ahead and initialize the device. If there is more than one
433 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
434 if (graphicsQueueFamilyCount == 1)
435 {
436 ANGLE_TRY(initializeDevice(firstGraphicsQueueFamily));
437 }
438
Jamie Madill035fd6b2017-10-03 15:43:22 -0400439 // Store the physical device memory properties so we can find the right memory pools.
440 mMemoryProperties.init(mPhysicalDevice);
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500441
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500442 mGlslangWrapper = GlslangWrapper::GetReference();
443
Jamie Madill6a89d222017-11-02 11:59:51 -0400444 // Initialize the format table.
445 mFormatTable.initialize(mPhysicalDevice, &mNativeTextureCaps);
446
Jamie Madill327ba852016-11-30 12:38:28 -0500447 return vk::NoError();
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400448}
449
Jamie Madill4d0bf552016-12-28 15:45:24 -0500450vk::Error RendererVk::initializeDevice(uint32_t queueFamilyIndex)
451{
452 uint32_t deviceLayerCount = 0;
453 ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));
454
455 std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
456 if (deviceLayerCount > 0)
457 {
458 ANGLE_VK_TRY(vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
459 deviceLayerProps.data()));
460 }
461
462 uint32_t deviceExtensionCount = 0;
463 ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
464 &deviceExtensionCount, nullptr));
465
466 std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
467 if (deviceExtensionCount > 0)
468 {
469 ANGLE_VK_TRY(vkEnumerateDeviceExtensionProperties(
470 mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
471 }
472
473 if (mEnableValidationLayers)
474 {
475 if (!HasStandardValidationLayer(deviceLayerProps))
476 {
Yuly Novikovbcb3f9b2017-01-27 22:45:18 -0500477 WARN() << "Vulkan standard validation layer is missing.";
Jamie Madill4d0bf552016-12-28 15:45:24 -0500478 mEnableValidationLayers = false;
479 }
480 }
481
482 std::vector<const char *> enabledDeviceExtensions;
483 enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
484
485 ANGLE_VK_TRY(VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));
486
487 VkDeviceQueueCreateInfo queueCreateInfo;
488
489 float zeroPriority = 0.0f;
490
491 queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
492 queueCreateInfo.pNext = nullptr;
493 queueCreateInfo.flags = 0;
494 queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
495 queueCreateInfo.queueCount = 1;
496 queueCreateInfo.pQueuePriorities = &zeroPriority;
497
498 // Initialize the device
499 VkDeviceCreateInfo createInfo;
500
501 createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
502 createInfo.pNext = nullptr;
503 createInfo.flags = 0;
504 createInfo.queueCreateInfoCount = 1;
505 createInfo.pQueueCreateInfos = &queueCreateInfo;
506 createInfo.enabledLayerCount = mEnableValidationLayers ? 1u : 0u;
507 createInfo.ppEnabledLayerNames =
508 mEnableValidationLayers ? &g_VkStdValidationLayerName : nullptr;
509 createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
510 createInfo.ppEnabledExtensionNames =
511 enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
512 createInfo.pEnabledFeatures = nullptr; // TODO(jmadill): features
513
514 ANGLE_VK_TRY(vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
515
516 mCurrentQueueFamilyIndex = queueFamilyIndex;
517
518 vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);
519
520 // Initialize the command pool now that we know the queue family index.
521 VkCommandPoolCreateInfo commandPoolInfo;
522 commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
523 commandPoolInfo.pNext = nullptr;
524 // TODO(jmadill): Investigate transient command buffers.
525 commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
526 commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
527
Jamie Madill5deea722017-02-16 10:44:46 -0500528 ANGLE_TRY(mCommandPool.init(mDevice, commandPoolInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500529
Jamie Madill4d0bf552016-12-28 15:45:24 -0500530 return vk::NoError();
531}
532
533vk::ErrorOrResult<uint32_t> RendererVk::selectPresentQueueForSurface(VkSurfaceKHR surface)
534{
535 // We've already initialized a device, and can't re-create it unless it's never been used.
536 // TODO(jmadill): Handle the re-creation case if necessary.
537 if (mDevice != VK_NULL_HANDLE)
538 {
539 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
540
541 // Check if the current device supports present on this surface.
542 VkBool32 supportsPresent = VK_FALSE;
543 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
544 surface, &supportsPresent));
545
546 return (supportsPresent == VK_TRUE);
547 }
548
549 // Find a graphics and present queue.
550 Optional<uint32_t> newPresentQueue;
551 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
552 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
553 {
554 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
555 if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
556 {
557 VkBool32 supportsPresent = VK_FALSE;
558 ANGLE_VK_TRY(vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, queueIndex, surface,
559 &supportsPresent));
560
561 if (supportsPresent == VK_TRUE)
562 {
563 newPresentQueue = queueIndex;
564 break;
565 }
566 }
567 }
568
569 ANGLE_VK_CHECK(newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
570 ANGLE_TRY(initializeDevice(newPresentQueue.value()));
571
572 return newPresentQueue.value();
573}
574
575std::string RendererVk::getVendorString() const
576{
577 switch (mPhysicalDeviceProperties.vendorID)
578 {
579 case VENDOR_ID_AMD:
580 return "Advanced Micro Devices";
581 case VENDOR_ID_NVIDIA:
582 return "NVIDIA";
583 case VENDOR_ID_INTEL:
584 return "Intel";
585 default:
586 {
587 // TODO(jmadill): More vendor IDs.
588 std::stringstream strstr;
589 strstr << "Vendor ID: " << mPhysicalDeviceProperties.vendorID;
590 return strstr.str();
591 }
592 }
593}
594
Jamie Madille09bd5d2016-11-29 16:20:35 -0500595std::string RendererVk::getRendererDescription() const
596{
Jamie Madill4d0bf552016-12-28 15:45:24 -0500597 std::stringstream strstr;
598
599 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
600
601 strstr << "Vulkan ";
602 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
603 strstr << VK_VERSION_MINOR(apiVersion) << ".";
604 strstr << VK_VERSION_PATCH(apiVersion);
605
606 strstr << "(" << mPhysicalDeviceProperties.deviceName << ")";
607
608 return strstr.str();
Jamie Madille09bd5d2016-11-29 16:20:35 -0500609}
610
Jamie Madillacccc6c2016-05-03 17:22:10 -0400611void RendererVk::ensureCapsInitialized() const
612{
613 if (!mCapsInitialized)
614 {
615 generateCaps(&mNativeCaps, &mNativeTextureCaps, &mNativeExtensions, &mNativeLimitations);
616 mCapsInitialized = true;
617 }
618}
619
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500620void RendererVk::generateCaps(gl::Caps *outCaps,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400621 gl::TextureCapsMap * /*outTextureCaps*/,
Jamie Madillb8353b02017-01-25 12:57:21 -0800622 gl::Extensions *outExtensions,
Jamie Madillacccc6c2016-05-03 17:22:10 -0400623 gl::Limitations * /* outLimitations */) const
624{
Jamie Madill327ba852016-11-30 12:38:28 -0500625 // TODO(jmadill): Caps.
Jamie Madill8ecf7f92017-01-13 17:29:52 -0500626 outCaps->maxDrawBuffers = 1;
Jiawei-Shao2597fb62016-12-09 16:38:02 +0800627 outCaps->maxVertexAttributes = gl::MAX_VERTEX_ATTRIBS;
628 outCaps->maxVertexAttribBindings = gl::MAX_VERTEX_ATTRIB_BINDINGS;
Jamie Madill035fd6b2017-10-03 15:43:22 -0400629 outCaps->maxVaryingVectors = 16;
630 outCaps->maxTextureImageUnits = 1;
631 outCaps->maxCombinedTextureImageUnits = 1;
632 outCaps->max2DTextureSize = 1024;
Jamie Madilld03a8492017-10-03 15:46:06 -0400633 outCaps->maxElementIndex = std::numeric_limits<GLuint>::max() - 1;
Jamie Madill6276b922017-09-25 02:35:57 -0400634 outCaps->maxFragmentUniformVectors = 8;
635 outCaps->maxVertexUniformVectors = 8;
Jamie Madillb79e7bb2017-10-24 13:55:50 -0400636 outCaps->maxColorAttachments = 1;
Jamie Madillb8353b02017-01-25 12:57:21 -0800637
638 // Enable this for simple buffer readback testing, but some functionality is missing.
639 // TODO(jmadill): Support full mapBufferRange extension.
640 outExtensions->mapBuffer = true;
641 outExtensions->mapBufferRange = true;
Jamie Madillacccc6c2016-05-03 17:22:10 -0400642}
643
644const gl::Caps &RendererVk::getNativeCaps() const
645{
646 ensureCapsInitialized();
647 return mNativeCaps;
648}
649
650const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
651{
652 ensureCapsInitialized();
653 return mNativeTextureCaps;
654}
655
656const gl::Extensions &RendererVk::getNativeExtensions() const
657{
658 ensureCapsInitialized();
659 return mNativeExtensions;
660}
661
662const gl::Limitations &RendererVk::getNativeLimitations() const
663{
664 ensureCapsInitialized();
665 return mNativeLimitations;
666}
667
Jamie Madill7f738d42017-11-20 17:06:27 -0500668vk::Error RendererVk::getStartedCommandBuffer(vk::CommandBufferAndState **commandBufferOut)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500669{
Jamie Madill7f738d42017-11-20 17:06:27 -0500670 ANGLE_TRY(mCommandBuffer.ensureStarted(mDevice, mCommandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
Jamie Madill0c0dc342017-03-24 14:18:51 -0400671 *commandBufferOut = &mCommandBuffer;
672 return vk::NoError();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500673}
674
Jamie Madill7f738d42017-11-20 17:06:27 -0500675vk::Error RendererVk::submitCommandBuffer(vk::CommandBufferAndState *commandBuffer)
Jamie Madill4d0bf552016-12-28 15:45:24 -0500676{
Jamie Madill7f738d42017-11-20 17:06:27 -0500677 ANGLE_TRY(commandBuffer->ensureFinished());
Jamie Madill0c0dc342017-03-24 14:18:51 -0400678
Jamie Madill4d0bf552016-12-28 15:45:24 -0500679 VkFenceCreateInfo fenceInfo;
680 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
681 fenceInfo.pNext = nullptr;
682 fenceInfo.flags = 0;
683
Jamie Madill4d0bf552016-12-28 15:45:24 -0500684 VkSubmitInfo submitInfo;
685 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
686 submitInfo.pNext = nullptr;
687 submitInfo.waitSemaphoreCount = 0;
688 submitInfo.pWaitSemaphores = nullptr;
689 submitInfo.pWaitDstStageMask = nullptr;
690 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400691 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madill4d0bf552016-12-28 15:45:24 -0500692 submitInfo.signalSemaphoreCount = 0;
693 submitInfo.pSignalSemaphores = nullptr;
694
695 // TODO(jmadill): Investigate how to properly submit command buffers.
Jamie Madill4c26fc22017-02-24 11:04:10 -0500696 ANGLE_TRY(submit(submitInfo));
Jamie Madill4d0bf552016-12-28 15:45:24 -0500697
Jamie Madillf651c772017-02-21 15:03:51 -0500698 return vk::NoError();
699}
700
Jamie Madill7f738d42017-11-20 17:06:27 -0500701vk::Error RendererVk::submitAndFinishCommandBuffer(vk::CommandBufferAndState *commandBuffer)
Jamie Madillf651c772017-02-21 15:03:51 -0500702{
703 ANGLE_TRY(submitCommandBuffer(commandBuffer));
Jamie Madill4c26fc22017-02-24 11:04:10 -0500704 ANGLE_TRY(finish());
Jamie Madill4d0bf552016-12-28 15:45:24 -0500705
706 return vk::NoError();
707}
708
Jamie Madill7f738d42017-11-20 17:06:27 -0500709vk::Error RendererVk::submitCommandsWithSync(vk::CommandBufferAndState *commandBuffer,
Jamie Madille918de22017-04-12 10:21:11 -0400710 const vk::Semaphore &waitSemaphore,
711 const vk::Semaphore &signalSemaphore)
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500712{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400713 ANGLE_TRY(commandBuffer->end());
714
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500715 VkPipelineStageFlags waitStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
716
717 VkSubmitInfo submitInfo;
718 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
719 submitInfo.pNext = nullptr;
720 submitInfo.waitSemaphoreCount = 1;
Jamie Madille918de22017-04-12 10:21:11 -0400721 submitInfo.pWaitSemaphores = waitSemaphore.ptr();
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500722 submitInfo.pWaitDstStageMask = &waitStageMask;
723 submitInfo.commandBufferCount = 1;
Jamie Madill0c0dc342017-03-24 14:18:51 -0400724 submitInfo.pCommandBuffers = commandBuffer->ptr();
Jamie Madille918de22017-04-12 10:21:11 -0400725 submitInfo.signalSemaphoreCount = 1;
726 submitInfo.pSignalSemaphores = signalSemaphore.ptr();
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500727
728 // TODO(jmadill): Investigate how to properly queue command buffer work.
Jamie Madill0c0dc342017-03-24 14:18:51 -0400729 ANGLE_TRY(submitFrame(submitInfo));
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500730
Jamie Madill7b57b9d2017-01-13 09:33:38 -0500731 return vk::NoError();
732}
733
Jamie Madill4c26fc22017-02-24 11:04:10 -0500734vk::Error RendererVk::finish()
735{
736 ASSERT(mQueue != VK_NULL_HANDLE);
Jamie Madill4c26fc22017-02-24 11:04:10 -0500737 ANGLE_VK_TRY(vkQueueWaitIdle(mQueue));
Jamie Madill0c0dc342017-03-24 14:18:51 -0400738 freeAllInFlightResources();
Jamie Madill4c26fc22017-02-24 11:04:10 -0500739 return vk::NoError();
740}
741
Jamie Madill0c0dc342017-03-24 14:18:51 -0400742void RendererVk::freeAllInFlightResources()
743{
744 for (auto &fence : mInFlightFences)
745 {
Jamie Madill7f738d42017-11-20 17:06:27 -0500746 fence.get().destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400747 }
748 mInFlightFences.clear();
749
750 for (auto &command : mInFlightCommands)
751 {
Jamie Madill7f738d42017-11-20 17:06:27 -0500752 command.get().destroy(mDevice, mCommandPool);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400753 }
754 mInFlightCommands.clear();
755
756 for (auto &garbage : mGarbage)
757 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400758 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400759 }
760 mGarbage.clear();
761}
762
Jamie Madill4c26fc22017-02-24 11:04:10 -0500763vk::Error RendererVk::checkInFlightCommands()
764{
Jamie Madill0c0dc342017-03-24 14:18:51 -0400765 size_t finishedIndex = 0;
Jamie Madillf651c772017-02-21 15:03:51 -0500766
Jamie Madill4c26fc22017-02-24 11:04:10 -0500767 // Check if any in-flight command buffers are finished.
Jamie Madill0c0dc342017-03-24 14:18:51 -0400768 for (size_t index = 0; index < mInFlightFences.size(); index++)
Jamie Madill4c26fc22017-02-24 11:04:10 -0500769 {
Jamie Madill0c0dc342017-03-24 14:18:51 -0400770 auto *inFlightFence = &mInFlightFences[index];
Jamie Madill4c26fc22017-02-24 11:04:10 -0500771
Jamie Madill0c0dc342017-03-24 14:18:51 -0400772 VkResult result = inFlightFence->get().getStatus(mDevice);
773 if (result == VK_NOT_READY)
774 break;
775 ANGLE_VK_TRY(result);
776 finishedIndex = index + 1;
777
778 // Release the fence handle.
779 // TODO(jmadill): Re-use fences.
Jamie Madill7f738d42017-11-20 17:06:27 -0500780 inFlightFence->get().destroy(mDevice);
Jamie Madill4c26fc22017-02-24 11:04:10 -0500781 }
782
Jamie Madill0c0dc342017-03-24 14:18:51 -0400783 if (finishedIndex == 0)
784 return vk::NoError();
Jamie Madillf651c772017-02-21 15:03:51 -0500785
Jamie Madill0c0dc342017-03-24 14:18:51 -0400786 Serial finishedSerial = mInFlightFences[finishedIndex - 1].queueSerial();
787 mInFlightFences.erase(mInFlightFences.begin(), mInFlightFences.begin() + finishedIndex);
788
789 size_t completedCBIndex = 0;
790 for (size_t cbIndex = 0; cbIndex < mInFlightCommands.size(); ++cbIndex)
791 {
792 auto *inFlightCB = &mInFlightCommands[cbIndex];
793 if (inFlightCB->queueSerial() > finishedSerial)
794 break;
795
796 completedCBIndex = cbIndex + 1;
Jamie Madill7f738d42017-11-20 17:06:27 -0500797 inFlightCB->get().destroy(mDevice, mCommandPool);
Jamie Madill0c0dc342017-03-24 14:18:51 -0400798 }
799
800 if (completedCBIndex == 0)
801 return vk::NoError();
802
803 mInFlightCommands.erase(mInFlightCommands.begin(),
804 mInFlightCommands.begin() + completedCBIndex);
805
806 size_t freeIndex = 0;
807 for (; freeIndex < mGarbage.size(); ++freeIndex)
808 {
Jamie Madille88ec8e2017-10-31 17:18:14 -0400809 if (!mGarbage[freeIndex].destroyIfComplete(mDevice, finishedSerial))
Jamie Madill0c0dc342017-03-24 14:18:51 -0400810 break;
811 }
812
813 // Remove the entries from the garbage list - they should be ready to go.
814 if (freeIndex > 0)
815 {
816 mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
Jamie Madillf651c772017-02-21 15:03:51 -0500817 }
818
Jamie Madill4c26fc22017-02-24 11:04:10 -0500819 return vk::NoError();
820}
821
// Submits work to the GPU queue without attaching a fence, then moves the
// current command buffer onto the in-flight list tagged with the current
// queue serial. Because no fence guards this submission, the command buffer
// is only recycled once a later fenced submission (see submitFrame) with a
// greater-or-equal serial is observed complete by checkInFlightCommands().
vk::Error RendererVk::submit(const VkSubmitInfo &submitInfo)
{
    ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, VK_NULL_HANDLE));

    // Store this command buffer in the in-flight list.
    mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);

    // Sanity check: an unbounded in-flight list means cleanup isn't running.
    ASSERT(mInFlightCommands.size() < 1000u);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mCurrentQueueSerial = mQueueSerialFactory.generate();

    return vk::NoError();
}
838
839vk::Error RendererVk::submitFrame(const VkSubmitInfo &submitInfo)
840{
841 VkFenceCreateInfo createInfo;
842 createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
843 createInfo.pNext = nullptr;
844 createInfo.flags = 0;
845
846 vk::Fence fence;
847 ANGLE_TRY(fence.init(mDevice, createInfo));
848
849 ANGLE_VK_TRY(vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));
850
851 // Store this command buffer in the in-flight list.
852 mInFlightFences.emplace_back(std::move(fence), mCurrentQueueSerial);
853 mInFlightCommands.emplace_back(std::move(mCommandBuffer), mCurrentQueueSerial);
854
855 // Sanity check.
856 ASSERT(mInFlightCommands.size() < 1000u);
857
858 // Increment the queue serial. If this fails, we should restart ANGLE.
Jamie Madillfb05bcb2017-06-07 15:43:18 -0400859 // TODO(jmadill): Overflow check.
860 mCurrentQueueSerial = mQueueSerialFactory.generate();
Jamie Madill0c0dc342017-03-24 14:18:51 -0400861
862 ANGLE_TRY(checkInFlightCommands());
863
Jamie Madill4c26fc22017-02-24 11:04:10 -0500864 return vk::NoError();
865}
866
// Initializes a staging image used to move texel data between the CPU and
// device images.
//   dimension: texture dimensionality of the staging image.
//   format:    ANGLE format wrapper; only its Vulkan texture format is used.
//   extent:    image size in texels.
//   usage:     staging usage/direction flags.
//   imageOut:  receives the initialized image.
// NOTE(review): init() receives mMemoryProperties, so it presumably also
// allocates backing memory - confirm against StagingImage::init.
vk::Error RendererVk::createStagingImage(TextureDimension dimension,
                                         const vk::Format &format,
                                         const gl::Extents &extent,
                                         vk::StagingUsage usage,
                                         vk::StagingImage *imageOut)
{
    ANGLE_TRY(imageOut->init(mDevice, mCurrentQueueFamilyIndex, mMemoryProperties, dimension,
                             format.vkTextureFormat, extent, usage));
    return vk::NoError();
}
877
// Accessor for the renderer-owned GlslangWrapper (GLSL-to-SPIR-V translation
// helper). The caller does not take ownership.
GlslangWrapper *RendererVk::getGlslangWrapper()
{
    return mGlslangWrapper;
}
882
// Returns the serial that will tag the next submission. Resources recorded
// against this serial are considered in-use until a fence covering it
// completes (see checkInFlightCommands).
Serial RendererVk::getCurrentQueueSerial() const
{
    return mCurrentQueueSerial;
}
887
Jamie Madill1b038242017-11-01 15:14:36 -0400888gl::Error RendererVk::ensureInRenderPass(const gl::Context *context, FramebufferVk *framebufferVk)
889{
890 if (mCurrentRenderPassFramebuffer == framebufferVk)
891 {
892 return gl::NoError();
893 }
894
895 if (mCurrentRenderPassFramebuffer)
896 {
897 endRenderPass();
898 }
Jamie Madill9f2a8612017-11-30 12:43:09 -0500899 ANGLE_TRY(framebufferVk->beginRenderPass(context, this, &mCommandBuffer, mCurrentQueueSerial));
Jamie Madill1b038242017-11-01 15:14:36 -0400900 mCurrentRenderPassFramebuffer = framebufferVk;
901 return gl::NoError();
902}
903
904void RendererVk::endRenderPass()
905{
906 if (mCurrentRenderPassFramebuffer)
907 {
908 ASSERT(mCommandBuffer.started());
909 mCommandBuffer.endRenderPass();
910 mCurrentRenderPassFramebuffer = nullptr;
911 }
912}
913
Jamie Madill7bd16662017-10-28 19:40:50 -0400914void RendererVk::onReleaseRenderPass(const FramebufferVk *framebufferVk)
915{
916 if (mCurrentRenderPassFramebuffer == framebufferVk)
917 {
918 endRenderPass();
919 }
920}
921
// Returns true while the GPU may still be reading or writing |resource|,
// judged by comparing the resource's recorded queue serial against the
// completed-serial watermark.
bool RendererVk::isResourceInUse(const ResourceVk &resource)
{
    return isSerialInUse(resource.getQueueSerial());
}
926
// A serial is "in use" until it falls at or below the last serial observed
// complete on the GPU.
// NOTE(review): verify mLastCompletedQueueSerial is actually advanced when
// fences complete - no writer is visible in this part of the file.
bool RendererVk::isSerialInUse(Serial serial)
{
    return serial > mLastCompletedQueueSerial;
}
931
// Fetches (or lazily creates) a cached render pass that is compatible with
// |desc|, suitable for framebuffer/pipeline creation. The cache retains
// ownership of *renderPassOut; the current queue serial is passed so the
// cache can track when entries are safe to evict.
vk::Error RendererVk::getCompatibleRenderPass(const vk::RenderPassDesc &desc,
                                              vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getCompatibleRenderPass(mDevice, mCurrentQueueSerial, desc,
                                                    renderPassOut);
}
938
// Fetches (or lazily creates) a cached render pass matching |desc| with the
// exact per-attachment load/store ops in |ops| - the variant used when
// actually beginning a render pass, as opposed to the compatibility-only
// lookup above. The cache retains ownership of *renderPassOut.
vk::Error RendererVk::getRenderPassWithOps(const vk::RenderPassDesc &desc,
                                           const vk::AttachmentOpsArray &ops,
                                           vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getRenderPassWithOps(mDevice, mCurrentQueueSerial, desc, ops,
                                                 renderPassOut);
}
946
Jamie Madill9e54b5a2016-05-25 12:57:39 -0400947} // namespace rx