//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/system_utils.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "platform/Platform.h"

#include "third_party/trace_event/trace_event.h"

// Consts
namespace
{
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
constexpr size_t kInFlightCommandsLimit = 100u;
}  // anonymous namespace

namespace rx
{

namespace
{
// We currently allocate only 2 uniform buffers per descriptor set: one for the fragment shader
// and one for the vertex shader.
constexpr size_t kUniformBufferDescriptorsPerDescriptorSet = 2;
// Update the pipeline cache once every this many swaps. 10 * 60 * 60 = 36,000 swaps; at 60 fps
// that is every 10 minutes.
static constexpr uint32_t kPipelineCacheVkUpdatePeriod = 10 * 60 * 60;
// Wait a maximum of 10s for a fence. If that times out, we declare it a failure.
static constexpr uint64_t kMaxFenceWaitTimeNs = 10'000'000'000llu;

bool ShouldEnableMockICD(const egl::AttributeMap &attribs)
{
#if !defined(ANGLE_PLATFORM_ANDROID)
    // Mock ICD does not currently run on Android
    return (attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
                        EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE) ==
            EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE);
#else
    return false;
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
}

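// Checks that every requested extension name appears in the enumerated extension list. Note that
// this returns a raw VkResult rather than an angle::Result, so the call sites below wrap it in
// ANGLE_VK_TRY.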
VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
                                 const std::vector<const char *> &enabledExtensionNames)
{
    // Compile the extension names into a set.
    std::set<std::string> extensionNames;
    for (const auto &extensionProp : extensionProps)
    {
        extensionNames.insert(extensionProp.extensionName);
    }

    for (const char *extensionName : enabledExtensionNames)
    {
        if (extensionNames.count(extensionName) == 0)
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }

    return VK_SUCCESS;
}

// Array of validation error/warning messages that will be ignored; each entry should include a
// bug ID.
constexpr std::array<const char *, 1> kSkippedMessages = {
    // http://anglebug.com/2796
    " [ UNASSIGNED-CoreValidation-Shader-PointSizeMissing ] Object: VK_NULL_HANDLE (Type = 19) "
    "| Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader "
    "corresponding to VK_SHADER_STAGE_VERTEX_BIT."};

// Suppresses known validation errors. Returns true if the given message matches one of the
// skipped messages above, and false otherwise.
bool IsIgnoredDebugMessage(const char *message)
{
    for (const auto &msg : kSkippedMessages)
    {
        if (strcmp(msg, message) == 0)
        {
            return true;
        }
    }
    return false;
}

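// Per the VK_EXT_debug_report contract, returning VK_TRUE from the callback asks the layer to
// abort the Vulkan call that triggered the report, while VK_FALSE lets it proceed; we only abort
// for errors, and only in debug builds.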
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if (IsIgnoredDebugMessage(message))
    {
        return VK_FALSE;
    }
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}

// If we're loading the validation layers, we could be running from any random directory.
// Change to the executable directory so we can find the layers, then change back to the
// previous directory so that we don't disrupt the application.
class ScopedVkLoaderEnvironment : angle::NonCopyable
{
  public:
    ScopedVkLoaderEnvironment(bool enableValidationLayers, bool enableMockICD)
        : mEnableValidationLayers(enableValidationLayers),
          mEnableMockICD(enableMockICD),
          mChangedCWD(false),
          mChangedICDPath(false)
    {
// Changing the CWD and setting environment variables makes no sense on Android,
// since this code is part of a Java application there.
// The Android Vulkan loader doesn't need this either.
#if !defined(ANGLE_PLATFORM_ANDROID)
        if (enableMockICD)
        {
            // Override environment variable to use built Mock ICD
            // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
            mPreviousICDPath = angle::GetEnvironmentVar(g_VkICDPathEnv);
            mChangedICDPath  = angle::SetEnvironmentVar(g_VkICDPathEnv, ANGLE_VK_ICD_JSON);
            if (!mChangedICDPath)
            {
                ERR() << "Error setting Path for Mock/Null Driver.";
                mEnableMockICD = false;
            }
        }
        if (mEnableValidationLayers || mEnableMockICD)
        {
            const auto &cwd = angle::GetCWD();
            if (!cwd.valid())
            {
                ERR() << "Error getting CWD for Vulkan layers init.";
                mEnableValidationLayers = false;
                mEnableMockICD          = false;
            }
            else
            {
                mPreviousCWD       = cwd.value();
                const char *exeDir = angle::GetExecutableDirectory();
                mChangedCWD        = angle::SetCWD(exeDir);
                if (!mChangedCWD)
                {
                    ERR() << "Error setting CWD for Vulkan layers init.";
                    mEnableValidationLayers = false;
                    mEnableMockICD          = false;
                }
            }
        }

        // Override environment variable to use the ANGLE layers.
        if (mEnableValidationLayers)
        {
            if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_DATA_DIR))
            {
                ERR() << "Error setting environment for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
        }
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
    }

    ~ScopedVkLoaderEnvironment()
    {
        if (mChangedCWD)
        {
#if !defined(ANGLE_PLATFORM_ANDROID)
            ASSERT(mPreviousCWD.valid());
            angle::SetCWD(mPreviousCWD.value().c_str());
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
        }
        if (mChangedICDPath)
        {
            if (mPreviousICDPath.value().empty())
            {
                angle::UnsetEnvironmentVar(g_VkICDPathEnv);
            }
            else
            {
                angle::SetEnvironmentVar(g_VkICDPathEnv, mPreviousICDPath.value().c_str());
            }
        }
    }

    bool canEnableValidationLayers() const { return mEnableValidationLayers; }

    bool canEnableMockICD() const { return mEnableMockICD; }

  private:
    bool mEnableValidationLayers;
    bool mEnableMockICD;
    bool mChangedCWD;
    Optional<std::string> mPreviousCWD;
    bool mChangedICDPath;
    Optional<std::string> mPreviousICDPath;
};

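// A minimal usage sketch for ScopedVkLoaderEnvironment, mirroring initialize() below; the guard
// is constructed on the stack before the instance is created, and its destructor restores the
// previous CWD and ICD environment:
//
//     ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs),
//                                                 ShouldEnableMockICD(attribs));
//     mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
//     mEnableMockICD          = scopedEnvironment.canEnableMockICD();

// ChoosePhysicalDevice prefers the mock ICD device (matched against the kMock* constants defined
// at the top of this file) when requested, and otherwise falls back to the first enumerated
// device; proper multi-GPU selection is still a TODO (see initialize()).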
void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
                          bool preferMockICD,
                          VkPhysicalDevice *physicalDeviceOut,
                          VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
{
    ASSERT(!physicalDevices.empty());
    if (preferMockICD)
    {
        for (const VkPhysicalDevice &physicalDevice : physicalDevices)
        {
            vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
            if ((kMockVendorID == physicalDevicePropertiesOut->vendorID) &&
                (kMockDeviceID == physicalDevicePropertiesOut->deviceID) &&
                (strcmp(kMockDeviceName, physicalDevicePropertiesOut->deviceName) == 0))
            {
                *physicalDeviceOut = physicalDevice;
                return;
            }
        }
        WARN() << "Vulkan Mock Driver was requested but Mock Device was not found. Using default "
                  "physicalDevice instead.";
    }

    // Fall back to first device.
    *physicalDeviceOut = physicalDevices[0];
    vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
}

// Initially dumping the command graphs is disabled.
constexpr bool kEnableCommandGraphDiagnostics = false;
}  // anonymous namespace

// CommandBatch implementation.
RendererVk::CommandBatch::CommandBatch() = default;

RendererVk::CommandBatch::~CommandBatch() = default;

RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{
}

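// Note: move-assignment is written as a swap so that the moved-from batch takes ownership of
// whatever this batch previously held, and those resources still get destroyed exactly once.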
RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}

void RendererVk::CommandBatch::destroy(VkDevice device)
{
    commandPool.destroy(device);
    fence.destroy(device);
}

// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableMockICD(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mCommandGraph(kEnableCommandGraphDiagnostics),
      mGpuEventsEnabled(false),
      mGpuClockSync{std::numeric_limits<double>::max(), std::numeric_limits<double>::max()},
      mGpuEventTimestampOrigin(0)
{
}

RendererVk::~RendererVk()
{
}

void RendererVk::onDestroy(vk::Context *context)
{
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        (void)finish(context);
    }

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);
    mGraphicsPipelineCache.destroy(mDevice);
    mPipelineCacheVk.destroy(mDevice);
    mSubmitSemaphorePool.destroy(mDevice);
    mShaderLibrary.destroy(mDevice);
    mGpuEventQueryPool.destroy(mDevice);

    GlslangWrapper::Release();

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}

void RendererVk::notifyDeviceLost()
{
    mDeviceLost = true;

    mCommandGraph.clear();
    mLastSubmittedQueueSerial = mCurrentQueueSerial;
    mCurrentQueueSerial       = mQueueSerialFactory.generate();
    freeAllInFlightResources();

    mDisplay->notifyDeviceLost();
}

bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}

angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiName)
{
    mDisplay                         = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs),
                                                ShouldEnableMockICD(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnableMockICD          = scopedEnvironment.canEnableMockICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount           = 0;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(
            instanceLayerProps, layersRequested, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiName);

    // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
    if (mEnableValidationLayers)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));

    VkApplicationInfo applicationInfo  = {};
    applicationInfo.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName   = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName        = "ANGLE";
    applicationInfo.engineVersion      = 1;
    applicationInfo.apiVersion         = VK_API_VERSION_1_0;

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags                = 0;
    instanceInfo.pApplicationInfo     = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount   = enabledLayerCount;
    instanceInfo.ppEnabledLayerNames = enabledLayerNames;

    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));

    if (mEnableValidationLayers)
    {
        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData   = this;

        auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
        ASSERT(createDebugReportCallback);
        ANGLE_VK_TRY(displayVk, createDebugReportCallback(mInstance, &debugReportInfo, nullptr,
                                                          &mDebugReportCallback));
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnableMockICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount   = 0;
    uint32_t firstGraphicsQueueFamily = 0;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            break;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    initFeatures();

Jamie Madill4d0bf552016-12-28 15:45:24 -0500534 // If only one queue family, go ahead and initialize the device. If there is more than one
535 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    GlslangWrapper::Initialize();

    // Initialize the format table.
    mFormatTable.initialize(mPhysicalDevice, mPhysicalDeviceProperties, mFeatures,
                            &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    return angle::Result::Continue();
}

angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount           = 0;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers = GetAvailableValidationLayers(
            deviceLayerProps, false, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if (getFeatures().flipViewportY)
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }

    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Select additional features to be enabled
    VkPhysicalDeviceFeatures enabledFeatures = {};
    enabledFeatures.inheritedQueries         = mPhysicalDeviceFeatures.inheritedQueries;

    VkDeviceQueueCreateInfo queueCreateInfo = {};

    float zeroPriority = 0.0f;

    queueCreateInfo.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags            = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount       = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo = {};

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = enabledLayerCount;
    createInfo.ppEnabledLayerNames   = enabledLayerNames;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = &enabledFeatures;

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo = {};
    commandPoolInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.flags                   = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex        = mCurrentQueueFamilyIndex;

    ANGLE_VK_TRY(displayVk, mCommandPool.init(mDevice, commandPoolInfo));

    // Initialize the vulkan pipeline cache.
    ANGLE_TRY(initPipelineCacheVk(displayVk));

    // Initialize the submission semaphore pool.
    ANGLE_TRY(mSubmitSemaphorePool.init(displayVk, vk::kDefaultSemaphorePoolSize));

#if ANGLE_ENABLE_VULKAN_GPU_TRACE_EVENTS
    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    // GPU tracing workaround for anglebug.com/2927. The renderer should not emit gpu events
    // during platform discovery.
    const unsigned char *gpuEventsEnabled =
        platform->getTraceCategoryEnabledFlag(platform, "gpu.angle.gpu");
    mGpuEventsEnabled = gpuEventsEnabled && *gpuEventsEnabled;
#endif

    if (mGpuEventsEnabled)
    {
        // Calculate the difference between CPU and GPU clocks for GPU event reporting.
        ANGLE_TRY(mGpuEventQueryPool.init(displayVk, VK_QUERY_TYPE_TIMESTAMP,
                                          vk::kDefaultTimestampQueryPoolSize));
        ANGLE_TRY(synchronizeCpuGpuTime(displayVk));
    }

    return angle::Result::Continue();
}

angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue();
        }
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue();
}

std::string RendererVk::getVendorString() const
{
    return GetVendorString(mPhysicalDeviceProperties.vendorID);
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(";

    // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so that
    // Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not be
    // needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
    // driver detection.
    if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
    {
        strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
    }

    strstr << mPhysicalDeviceProperties.deviceName << ")";

    return strstr.str();
}

gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Declare GLES2 support if necessary features for GLES3 are missing
    bool necessaryFeaturesForES3 = mPhysicalDeviceFeatures.inheritedQueries;

    if (!necessaryFeaturesForES3)
    {
        return gl::Version(2, 0);
    }

    return gl::Version(3, 0);
}

void RendererVk::initFeatures()
{
// Use OpenGL line rasterization rules by default.
// TODO(jmadill): Fix Android support. http://anglebug.com/2830
#if defined(ANGLE_PLATFORM_ANDROID)
    mFeatures.basicGLLineRasterization = false;
#else
    mFeatures.basicGLLineRasterization = true;
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
    // investigation. http://anglebug.com/2728
    mFeatures.flipViewportY = !IsIntel(mPhysicalDeviceProperties.vendorID);

#ifdef ANGLE_PLATFORM_WINDOWS
    // http://anglebug.com/2838
    mFeatures.extraCopyBufferRegion = IsIntel(mPhysicalDeviceProperties.vendorID);
#endif

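    // Give the platform layer (e.g. a test harness or embedder) a chance to override the feature
    // defaults chosen above.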
    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    platform->overrideFeaturesVk(platform, &mFeatures);

    // Work around incorrect NVIDIA point size range clamping.
    // TODO(jmadill): Narrow driver range once fixed. http://anglebug.com/2970
    if (IsNvidia(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.clampPointSize = true;
    }
}

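// Computes the blob-cache key under which the serialized VkPipelineCache contents are stored: a
// SHA1 over the driver's pipeline cache UUID plus the vendor and device IDs.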
void RendererVk::initPipelineCacheVkKey()
{
    std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
    // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
    // cache. It's not particularly necessary to write it as a hex number as done here, so long as
    // there is no '\0' in the result.
    for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
    {
        hashStream << std::hex << c;
    }
    // Add the vendor and device id too for good measure.
    hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
    hashStream << std::hex << mPhysicalDeviceProperties.deviceID;

    const std::string &hashString = hashStream.str();
    angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
                               hashString.length(), mPipelineCacheVkBlobKey.data());
}

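// Creates the VkPipelineCache, seeded with any hit from the EGL blob cache;
// mPipelineCacheVkUpdateTimeout (initialized from kPipelineCacheVkUpdatePeriod above) is what
// throttles how often the updated cache contents get persisted back.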
angle::Result RendererVk::initPipelineCacheVk(DisplayVk *display)
{
    initPipelineCacheVkKey();

    egl::BlobCache::Value initialData;
    bool success = display->getBlobCache()->get(display->getScratchBuffer(),
                                                mPipelineCacheVkBlobKey, &initialData);

    VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};

    pipelineCacheCreateInfo.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCacheCreateInfo.flags           = 0;
    pipelineCacheCreateInfo.initialDataSize = success ? initialData.size() : 0;
    pipelineCacheCreateInfo.pInitialData    = success ? initialData.data() : nullptr;

    ANGLE_VK_TRY(display, mPipelineCacheVk.init(mDevice, pipelineCacheCreateInfo));
    return angle::Result::Continue();
}

void RendererVk::ensureCapsInitialized() const
{
    if (!mCapsInitialized)
    {
        ASSERT(mCurrentQueueFamilyIndex < mQueueFamilyProperties.size());
        vk::GenerateCaps(mPhysicalDeviceProperties, mPhysicalDeviceFeatures,
                         mQueueFamilyProperties[mCurrentQueueFamilyIndex], mNativeTextureCaps,
                         &mNativeCaps, &mNativeExtensions, &mNativeLimitations);
        mCapsInitialized = true;
    }
}

void RendererVk::getSubmitWaitSemaphores(
    vk::Context *context,
    angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> *waitSemaphores,
    angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> *waitStageMasks)
{
    if (mSubmitLastSignaledSemaphore.getSemaphore())
    {
        waitSemaphores->push_back(mSubmitLastSignaledSemaphore.getSemaphore()->getHandle());
        waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

        // Return the semaphore to the pool (it will remain valid and unused until the queue that
        // is about to wait on it has finished execution).
        mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);
    }

    for (vk::SemaphoreHelper &semaphore : mSubmitWaitSemaphores)
    {
        waitSemaphores->push_back(semaphore.getSemaphore()->getHandle());
        waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

        mSubmitSemaphorePool.freeSemaphore(context, &semaphore);
    }
    mSubmitWaitSemaphores.clear();
}

const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}

uint32_t RendererVk::getMaxActiveTextures()
{
    // TODO(lucferron): expose this limitation to GL in Context Caps
    return std::min<uint32_t>(mPhysicalDeviceProperties.limits.maxPerStageDescriptorSamplers,
                              gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES);
}

const vk::CommandPool &RendererVk::getCommandPool() const
{
    return mCommandPool;
}

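// A full GPU-CPU synchronization point, roughly analogous to glFinish(): flushes any pending
// graph commands, then blocks until the queue is idle.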
angle::Result RendererVk::finish(vk::Context *context)
{
    if (!mCommandGraph.empty())
    {
        TRACE_EVENT0("gpu.angle", "RendererVk::finish");

        vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
        ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));

        angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
        angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
        getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);

        VkSubmitInfo submitInfo         = {};
        submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.waitSemaphoreCount   = static_cast<uint32_t>(waitSemaphores.size());
        submitInfo.pWaitSemaphores      = waitSemaphores.data();
        submitInfo.pWaitDstStageMask    = waitStageMasks.data();
        submitInfo.commandBufferCount   = 1;
        submitInfo.pCommandBuffers      = commandBatch.get().ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores    = nullptr;

        ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBatch.get())));
    }

    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();

    if (mGpuEventsEnabled)
    {
        // This loop should in practice execute once since the queue is already idle.
        while (mInFlightGpuEventQueries.size() > 0)
        {
            ANGLE_TRY(checkCompletedGpuEvents(context));
        }
        // Recalculate the CPU/GPU time difference to account for clock drifting. Avoid
        // unnecessary synchronization if there is no event to be adjusted (happens when finish()
        // gets called multiple times towards the end of the application).
        if (mGpuEvents.size() > 0)
        {
            ANGLE_TRY(synchronizeCpuGpuTime(context));
        }
    }

    return angle::Result::Continue();
}

void RendererVk::freeAllInFlightResources()
{
    for (CommandBatch &batch : mInFlightCommands)
    {
        // On device loss we need to wait for the fence to be signaled before destroying it.
        if (mDeviceLost)
        {
            VkResult status = batch.fence.wait(mDevice, kMaxFenceWaitTimeNs);
            // If the wait times out, it is probably not possible to recover from the lost device.
            ASSERT(status == VK_SUCCESS || status == VK_ERROR_DEVICE_LOST);
        }
        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
    }
    mInFlightCommands.clear();

    for (auto &garbage : mGarbage)
    {
        garbage.destroy(mDevice);
    }
    mGarbage.clear();

    mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
}

angle::Result RendererVk::checkCompletedCommands(vk::Context *context)
{
    int finishedCount = 0;

    for (CommandBatch &batch : mInFlightCommands)
    {
        VkResult result = batch.fence.getStatus(mDevice);
        if (result == VK_NOT_READY)
        {
            break;
        }
        ANGLE_VK_TRY(context, result);

        ASSERT(batch.serial > mLastCompletedQueueSerial);
        mLastCompletedQueueSerial = batch.serial;

        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
        ++finishedCount;
    }

    mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);

    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return angle::Result::Continue();
}

angle::Result RendererVk::submitFrame(vk::Context *context,
                                      const VkSubmitInfo &submitInfo,
                                      vk::CommandBuffer &&commandBuffer)
{
    VkFenceCreateInfo fenceInfo = {};
    fenceInfo.sType             = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.flags             = 0;

    vk::Scoped<CommandBatch> scopedBatch(mDevice);
    CommandBatch &batch = scopedBatch.get();
    ANGLE_VK_TRY(context, batch.fence.init(mDevice, fenceInfo));

    ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));

    // Store this command buffer in the in-flight list.
    batch.commandPool = std::move(mCommandPool);
    batch.serial      = mCurrentQueueSerial;

    mInFlightCommands.emplace_back(scopedBatch.release());

    // The CPU should be throttled to keep mInFlightCommands from growing too fast. That is done
    // on swap() though, and there could be multiple submissions in between (through glFlush()
    // calls), so the limit is larger than the expected number of images.
    ASSERT(mInFlightCommands.size() <= kInFlightCommandsLimit);

    // Increment the queue serial. If this fails, we should restart ANGLE.
    // TODO(jmadill): Overflow check.
    mLastSubmittedQueueSerial = mCurrentQueueSerial;
    mCurrentQueueSerial       = mQueueSerialFactory.generate();

    ANGLE_TRY(checkCompletedCommands(context));

    if (mGpuEventsEnabled)
    {
        ANGLE_TRY(checkCompletedGpuEvents(context));
    }

    // Simply null out the command buffer here - it was allocated using the command pool.
    commandBuffer.releaseHandle();

    // Reallocate the command pool for next frame.
    // TODO(jmadill): Consider reusing command pools.
    VkCommandPoolCreateInfo poolInfo = {};
    poolInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    poolInfo.flags                   = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    poolInfo.queueFamilyIndex        = mCurrentQueueFamilyIndex;

    ANGLE_VK_TRY(context, mCommandPool.init(mDevice, poolInfo));
    return angle::Result::Continue();
}

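// A serial is considered "in use" until the fence of the batch submitted with it has been seen to
// complete; anything greater than the last completed serial may still be executing on the GPU.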
bool RendererVk::isSerialInUse(Serial serial) const
{
    return serial > mLastCompletedQueueSerial;
}

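// Blocks (with a kMaxFenceWaitTimeNs timeout) on the first in-flight batch whose serial is at
// least the requested one, then reclaims all completed batches. The linear scan is acceptable
// because mInFlightCommands is bounded by kInFlightCommandsLimit.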
angle::Result RendererVk::finishToSerial(vk::Context *context, Serial serial)
{
    if (!isSerialInUse(serial) || mInFlightCommands.empty())
    {
        return angle::Result::Continue();
    }

    // Find the first batch with serial equal to or bigger than given serial (note that
    // the batch serials are unique, otherwise upper-bound would have been necessary).
    size_t batchIndex = mInFlightCommands.size() - 1;
    for (size_t i = 0; i < mInFlightCommands.size(); ++i)
    {
        if (mInFlightCommands[i].serial >= serial)
        {
            batchIndex = i;
            break;
        }
    }
    const CommandBatch &batch = mInFlightCommands[batchIndex];

    // Wait for it to finish.
    ANGLE_VK_TRY(context, batch.fence.wait(mDevice, kMaxFenceWaitTimeNs));

    // Clean up finished batches.
    return checkCompletedCommands(context);
}

angle::Result RendererVk::getCompatibleRenderPass(vk::Context *context,
                                                  const vk::RenderPassDesc &desc,
                                                  vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getCompatibleRenderPass(context, mCurrentQueueSerial, desc,
                                                    renderPassOut);
}

angle::Result RendererVk::getRenderPassWithOps(vk::Context *context,
                                               const vk::RenderPassDesc &desc,
                                               const vk::AttachmentOpsArray &ops,
                                               vk::RenderPass **renderPassOut)
{
    return mRenderPassCache.getRenderPassWithOps(context, mCurrentQueueSerial, desc, ops,
                                                 renderPassOut);
}

vk::CommandGraph *RendererVk::getCommandGraph()
{
    return &mCommandGraph;
}

angle::Result RendererVk::flushCommandGraph(vk::Context *context, vk::CommandBuffer *commandBatch)
{
    return mCommandGraph.submitCommands(context, mCurrentQueueSerial, &mRenderPassCache,
                                        &mCommandPool, commandBatch);
}

Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001127angle::Result RendererVk::flush(vk::Context *context)
Jamie Madill49ac74b2017-12-21 14:42:33 -05001128{
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001129 if (mCommandGraph.empty())
1130 {
1131 return angle::Result::Continue();
1132 }
1133
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001134 TRACE_EVENT0("gpu.angle", "RendererVk::flush");
1135
Jamie Madillbea35a62018-07-05 11:54:10 -04001136 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1137 ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001138
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001139 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001140 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
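    // Each wait semaphore is paired one-to-one with the pipeline stage mask at which the wait
    // takes effect, as VkSubmitInfo requires.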
    getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);

    // On every flush, create a semaphore to be signaled. On the next submission, this semaphore
    // will be waited on.
    ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &mSubmitLastSignaledSemaphore));

    VkSubmitInfo submitInfo = {};
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.waitSemaphoreCount   = static_cast<uint32_t>(waitSemaphores.size());
    submitInfo.pWaitSemaphores      = waitSemaphores.data();
    submitInfo.pWaitDstStageMask    = waitStageMasks.data();
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBatch.get().ptr();
    submitInfo.signalSemaphoreCount = 1;
    submitInfo.pSignalSemaphores    = mSubmitLastSignaledSemaphore.getSemaphore()->ptr();

    ANGLE_TRY(submitFrame(context, submitInfo, commandBatch.release()));

    return angle::Result::Continue();
}

Serial RendererVk::issueShaderSerial()
{
    return mShaderSerialFactory.generate();
}

angle::Result RendererVk::getPipeline(vk::Context *context,
                                      const vk::ShaderAndSerial &vertexShader,
                                      const vk::ShaderAndSerial &fragmentShader,
                                      const vk::PipelineLayout &pipelineLayout,
                                      const vk::GraphicsPipelineDesc &pipelineDesc,
                                      const gl::AttributesMask &activeAttribLocationsMask,
                                      vk::PipelineAndSerial **pipelineOut)
{
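    // Sanity-check that the pipeline description was built from these exact shader modules.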
    ASSERT(vertexShader.getSerial() ==
           pipelineDesc.getShaderStageInfo()[vk::ShaderType::VertexShader].moduleSerial);
    ASSERT(fragmentShader.getSerial() ==
           pipelineDesc.getShaderStageInfo()[vk::ShaderType::FragmentShader].moduleSerial);

    // Pull in a compatible RenderPass.
    vk::RenderPass *compatibleRenderPass = nullptr;
    ANGLE_TRY(
        getCompatibleRenderPass(context, pipelineDesc.getRenderPassDesc(), &compatibleRenderPass));

    return mGraphicsPipelineCache.getPipeline(
        context, mPipelineCacheVk, *compatibleRenderPass, pipelineLayout, activeAttribLocationsMask,
        vertexShader.get(), fragmentShader.get(), pipelineDesc, pipelineOut);
}

angle::Result RendererVk::getDescriptorSetLayout(
    vk::Context *context,
    const vk::DescriptorSetLayoutDesc &desc,
    vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
{
    return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
}

angle::Result RendererVk::getPipelineLayout(
    vk::Context *context,
    const vk::PipelineLayoutDesc &desc,
    const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
    vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
{
    return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
                                                  pipelineLayoutOut);
}

angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
{
    ASSERT(mPipelineCacheVk.valid());

    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue();
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    // Get the size of the cache.
    size_t pipelineCacheSize = 0;
    VkResult result = mPipelineCacheVk.getCacheData(mDevice, &pipelineCacheSize, nullptr);
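    // VK_INCOMPLETE is tolerated here: this call only queries the required size, so an incomplete
    // read is not treated as an error.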
    if (result != VK_INCOMPLETE)
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    angle::MemoryBuffer *pipelineCacheData = nullptr;
    ANGLE_VK_CHECK_ALLOC(displayVk,
                         displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));

    size_t originalPipelineCacheSize = pipelineCacheSize;
    result = mPipelineCacheVk.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
    // Note: we don't currently accept incomplete as we don't expect it (the full size of the
    // cache was determined just above), so receiving it hints at an implementation bug we would
    // want to know about early.
    ASSERT(result != VK_INCOMPLETE);
    ANGLE_VK_TRY(displayVk, result);

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= originalPipelineCacheSize);
    if (pipelineCacheSize < originalPipelineCacheSize)
    {
        memset(pipelineCacheData->data() + pipelineCacheSize, 0,
               originalPipelineCacheSize - pipelineCacheSize);
    }

    displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);

    return angle::Result::Continue();
}

angle::Result RendererVk::allocateSubmitWaitSemaphore(vk::Context *context,
                                                      const vk::Semaphore **outSemaphore)
{
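    // Semaphores allocated here accumulate until the next submission, which waits on (and
    // thereby consumes) all of them.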
    ASSERT(mSubmitWaitSemaphores.size() < mSubmitWaitSemaphores.max_size());

    vk::SemaphoreHelper semaphore;
    ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &semaphore));

    mSubmitWaitSemaphores.push_back(std::move(semaphore));
    *outSemaphore = mSubmitWaitSemaphores.back().getSemaphore();

    return angle::Result::Continue();
}

const vk::Semaphore *RendererVk::getSubmitLastSignaledSemaphore(vk::Context *context)
{
    const vk::Semaphore *semaphore = mSubmitLastSignaledSemaphore.getSemaphore();

    // Return the semaphore to the pool (which will remain valid and unused until the
    // queue it's about to be waited on has finished execution). The caller is about
    // to wait on it.
    mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);

    return semaphore;
}

vk::ShaderLibrary *RendererVk::getShaderLibrary()
{
    return &mShaderLibrary;
}

angle::Result RendererVk::getTimestamp(vk::Context *context, uint64_t *timestampOut)
{
    // The intent of this function is to query the timestamp without stalling the GPU. Currently,
    // that seems impossible, so instead, we are going to make a small submission with just a
    // timestamp query. First, the disjoint timer query extension says:
    //
    // > This will return the GL time after all previous commands have reached the GL server but
    // have not yet necessarily executed.
    //
    // The previous commands are stored in the command graph at the moment and are not yet flushed.
    // The wording allows us to make a submission to get the timestamp without performing a flush.
    //
    // Second:
    //
    // > By using a combination of this synchronous get command and the asynchronous timestamp
    // query object target, applications can measure the latency between when commands reach the
    // GL server and when they are realized in the framebuffer.
    //
    // This fits with the above strategy as well, although it may introduce a GPU bubble. This
    // function directly generates a command buffer and submits it instead of using the other
    // member functions. This is to avoid changing any state, such as the queue serial.

    // Create a query used to receive the GPU timestamp.
    vk::Scoped<vk::DynamicQueryPool> timestampQueryPool(mDevice);
    vk::QueryHelper timestampQuery;
    ANGLE_TRY(timestampQueryPool.get().init(context, VK_QUERY_TYPE_TIMESTAMP, 1));
    ANGLE_TRY(timestampQueryPool.get().allocateQuery(context, &timestampQuery));

    // Record the command buffer.
    vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
    vk::CommandBuffer &commandBuffer = commandBatch.get();

    VkCommandBufferAllocateInfo commandBufferInfo = {};
    commandBufferInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    commandBufferInfo.commandPool        = mCommandPool.getHandle();
    commandBufferInfo.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    commandBufferInfo.commandBufferCount = 1;

    ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));

    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags            = 0;
    beginInfo.pInheritanceInfo = nullptr;

    ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));

    commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
                                 timestampQuery.getQuery(), 1);
    commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                                 timestampQuery.getQueryPool()->getHandle(),
                                 timestampQuery.getQuery());

    ANGLE_VK_TRY(context, commandBuffer.end());

    // Create a fence for the submission.
    VkFenceCreateInfo fenceInfo = {};
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.flags = 0;

    vk::Scoped<vk::Fence> fence(mDevice);
    ANGLE_VK_TRY(context, fence.get().init(mDevice, fenceInfo));

    // Submit the command buffer.
    VkSubmitInfo submitInfo = {};
    submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.waitSemaphoreCount   = 0;
    submitInfo.pWaitSemaphores      = nullptr;
    submitInfo.pWaitDstStageMask    = nullptr;
    submitInfo.commandBufferCount   = 1;
    submitInfo.pCommandBuffers      = commandBuffer.ptr();
    submitInfo.signalSemaphoreCount = 0;
    submitInfo.pSignalSemaphores    = nullptr;

    ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, fence.get().getHandle()));

    // Wait for the submission to finish. Given that no semaphores are involved, the submission
    // can hopefully execute in parallel with what's already running on the GPU.
    ANGLE_VK_TRY(context, fence.get().wait(mDevice, kMaxFenceWaitTimeNs));

    // Get the query results.
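    // VK_QUERY_RESULT_WAIT_BIT blocks until the result is available, and VK_QUERY_RESULT_64_BIT
    // selects the full 64-bit counter; the value retrieved is in raw GPU timestamp ticks.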
    constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;

    ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
                              mDevice, timestampQuery.getQuery(), 1, sizeof(*timestampOut),
                              timestampOut, sizeof(*timestampOut), queryFlags));

    timestampQueryPool.get().freeQuery(context, &timestampQuery);

    return angle::Result::Continue();
}

angle::Result RendererVk::synchronizeCpuGpuTime(vk::Context *context)
{
    ASSERT(mGpuEventsEnabled);

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    // To synchronize CPU and GPU times, we need to get the CPU timestamp as close as possible to
    // the GPU timestamp. The process of getting the GPU timestamp is as follows:
    //
    //             CPU                            GPU
    //
    //     Record command buffer
    //     with timestamp query
    //
    //     Submit command buffer
    //
    //     Post-submission work             Begin execution
    //
    //            ????                    Write timestamp Tgpu
    //
    //            ????                       End execution
    //
    //            ????                    Return query results
    //
    //            ????
    //
    //     Get query results
    //
    // The areas of unknown work (????) on the CPU indicate that the CPU may or may not have
    // finished post-submission work while the GPU is executing in parallel. With no further
    // work, querying CPU timestamps before submission and after getting query results gives
    // bounds on Tgpu, which could be quite large.
    //
    // Using VkEvents, the GPU can be made to wait for the CPU and vice versa, in an effort to
    // reduce this range. This function implements the following procedure:
    //
    //             CPU                            GPU
    //
    //     Record command buffer
    //     with timestamp query
    //
    //     Submit command buffer
    //
    //     Post-submission work             Begin execution
    //
    //            ????                    Set Event GPUReady
    //
    //     Wait on Event GPUReady         Wait on Event CPUReady
    //
    //     Get CPU Time Ts                Wait on Event CPUReady
    //
    //     Set Event CPUReady             Wait on Event CPUReady
    //
    //     Get CPU Time Tcpu              Get GPU Time Tgpu
    //
    //     Wait on Event GPUDone          Set Event GPUDone
    //
    //     Get CPU Time Te                End Execution
    //
    //            Idle                    Return query results
    //
    //     Get query results
    //
    // If Te - Ts > epsilon, a GPU or CPU interruption can be assumed and the operation can be
    // retried. Once Te - Ts < epsilon, Tcpu can be taken to presumably match Tgpu. Finding an
    // epsilon that's valid for all devices may be difficult, so the loop is performed only a
    // limited number of times and the Tcpu,Tgpu pair corresponding to the smallest Te - Ts is
    // used for calibration.
    //
    // Note: Once VK_EXT_calibrated_timestamps is ubiquitous, this should be redone.

    // Make sure nothing is running.
    ASSERT(mCommandGraph.empty());

    TRACE_EVENT0("gpu.angle", "RendererVk::synchronizeCpuGpuTime");

    // Create a query used to receive the GPU timestamp.
    vk::QueryHelper timestampQuery;
    ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &timestampQuery));

    // Create the three events.
    VkEventCreateInfo eventCreateInfo = {};
    eventCreateInfo.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
    eventCreateInfo.flags = 0;

    vk::Scoped<vk::Event> cpuReady(mDevice), gpuReady(mDevice), gpuDone(mDevice);
    ANGLE_VK_TRY(context, cpuReady.get().init(mDevice, eventCreateInfo));
    ANGLE_VK_TRY(context, gpuReady.get().init(mDevice, eventCreateInfo));
    ANGLE_VK_TRY(context, gpuDone.get().init(mDevice, eventCreateInfo));

    constexpr uint32_t kRetries = 10;

    // Time suffixes used are S for seconds and Cycles for cycles.
    double tightestRangeS = 1e6f;
    double TcpuS          = 0;
    uint64_t TgpuCycles   = 0;
    for (uint32_t i = 0; i < kRetries; ++i)
    {
        // Reset the events.
        ANGLE_VK_TRY(context, cpuReady.get().reset(mDevice));
        ANGLE_VK_TRY(context, gpuReady.get().reset(mDevice));
        ANGLE_VK_TRY(context, gpuDone.get().reset(mDevice));

        // Record the command buffer.
        vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
        vk::CommandBuffer &commandBuffer = commandBatch.get();

        VkCommandBufferAllocateInfo commandBufferInfo = {};
        commandBufferInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        commandBufferInfo.commandPool        = mCommandPool.getHandle();
        commandBufferInfo.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        commandBufferInfo.commandBufferCount = 1;

        ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));

        VkCommandBufferBeginInfo beginInfo = {};
        beginInfo.sType            = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        beginInfo.flags            = 0;
        beginInfo.pInheritanceInfo = nullptr;

        ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));

        commandBuffer.setEvent(gpuReady.get(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);
        commandBuffer.waitEvents(1, cpuReady.get().ptr(), VK_PIPELINE_STAGE_HOST_BIT,
                                 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, nullptr, 0, nullptr, 0,
                                 nullptr);

        commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
                                     timestampQuery.getQuery(), 1);
        commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
                                     timestampQuery.getQueryPool()->getHandle(),
                                     timestampQuery.getQuery());

        commandBuffer.setEvent(gpuDone.get(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);

        ANGLE_VK_TRY(context, commandBuffer.end());

        // Submit the command buffer.
        angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
        angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
        getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);

        VkSubmitInfo submitInfo = {};
        submitInfo.sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.waitSemaphoreCount   = static_cast<uint32_t>(waitSemaphores.size());
        submitInfo.pWaitSemaphores      = waitSemaphores.data();
        submitInfo.pWaitDstStageMask    = waitStageMasks.data();
        submitInfo.commandBufferCount   = 1;
        submitInfo.pCommandBuffers      = commandBuffer.ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores    = nullptr;

        ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBuffer)));

        // Wait for the GPU to be ready. This is a short busy wait.
        VkResult result = VK_EVENT_RESET;
        do
        {
            result = gpuReady.get().getStatus(mDevice);
            if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
            {
                ANGLE_VK_TRY(context, result);
            }
        } while (result == VK_EVENT_RESET);

        double TsS = platform->monotonicallyIncreasingTime(platform);

        // Tell the GPU to go ahead with the timestamp query.
        ANGLE_VK_TRY(context, cpuReady.get().set(mDevice));
        double cpuTimestampS = platform->monotonicallyIncreasingTime(platform);

        // Wait for the GPU to be done. Another short busy wait.
        do
        {
            result = gpuDone.get().getStatus(mDevice);
            if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
            {
                ANGLE_VK_TRY(context, result);
            }
        } while (result == VK_EVENT_RESET);

        double TeS = platform->monotonicallyIncreasingTime(platform);

        // Get the query results.
        ANGLE_TRY(finishToSerial(context, getLastSubmittedQueueSerial()));

        constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;

        uint64_t gpuTimestampCycles = 0;
        ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
                                  mDevice, timestampQuery.getQuery(), 1, sizeof(gpuTimestampCycles),
                                  &gpuTimestampCycles, sizeof(gpuTimestampCycles), queryFlags));

        // Use the first timestamp queried as origin.
        if (mGpuEventTimestampOrigin == 0)
        {
            mGpuEventTimestampOrigin = gpuTimestampCycles;
        }

        // Take these CPU and GPU timestamps if there is better confidence.
        double confidenceRangeS = TeS - TsS;
        if (confidenceRangeS < tightestRangeS)
        {
            tightestRangeS = confidenceRangeS;
            TcpuS          = cpuTimestampS;
            TgpuCycles     = gpuTimestampCycles;
        }
    }

    mGpuEventQueryPool.freeQuery(context, &timestampQuery);

    // timestampPeriod gives nanoseconds/cycle.
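    // Conversion: (ticks since origin) * (nanoseconds per tick) / (1e9 nanoseconds per second)
    // yields seconds.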
    double TgpuS = (TgpuCycles - mGpuEventTimestampOrigin) *
                   static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) /
                   1'000'000'000.0;

    flushGpuEvents(TgpuS, TcpuS);

    mGpuClockSync.gpuTimestampS = TgpuS;
    mGpuClockSync.cpuTimestampS = TcpuS;

    return angle::Result::Continue();
}

angle::Result RendererVk::traceGpuEventImpl(vk::Context *context,
                                            vk::CommandBuffer *commandBuffer,
                                            char phase,
                                            const char *name)
{
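    // The phase character follows the trace event convention used by the platform layer (for
    // example, 'B' for begin and 'E' for end) and is forwarded unchanged to addTraceEvent.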
    ASSERT(mGpuEventsEnabled);

    GpuEventQuery event;

    event.name   = name;
    event.phase  = phase;
    event.serial = mCurrentQueueSerial;

    ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &event.queryPoolIndex, &event.queryIndex));

    commandBuffer->resetQueryPool(
        mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex, 1);
    commandBuffer->writeTimestamp(
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
        mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex);

    mInFlightGpuEventQueries.push_back(std::move(event));

    return angle::Result::Continue();
}

angle::Result RendererVk::checkCompletedGpuEvents(vk::Context *context)
{
    ASSERT(mGpuEventsEnabled);

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    int finishedCount = 0;

    for (GpuEventQuery &eventQuery : mInFlightGpuEventQueries)
    {
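        // In-flight queries are in submission order, so iteration can stop at the first event
        // whose submission has not yet completed.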
        // Only check the timestamp query if the submission has finished.
        if (eventQuery.serial > mLastCompletedQueueSerial)
        {
            break;
        }

        // See if the results are available.
        uint64_t gpuTimestampCycles = 0;
        VkResult result = mGpuEventQueryPool.getQueryPool(eventQuery.queryPoolIndex)
                              ->getResults(mDevice, eventQuery.queryIndex, 1,
                                           sizeof(gpuTimestampCycles), &gpuTimestampCycles,
                                           sizeof(gpuTimestampCycles), VK_QUERY_RESULT_64_BIT);
        if (result == VK_NOT_READY)
        {
            break;
        }
        ANGLE_VK_TRY(context, result);

        mGpuEventQueryPool.freeQuery(context, eventQuery.queryPoolIndex, eventQuery.queryIndex);

        GpuEvent event;
        event.gpuTimestampCycles = gpuTimestampCycles;
        event.name               = eventQuery.name;
        event.phase              = eventQuery.phase;

        mGpuEvents.emplace_back(event);

        ++finishedCount;
    }

    mInFlightGpuEventQueries.erase(mInFlightGpuEventQueries.begin(),
                                   mInFlightGpuEventQueries.begin() + finishedCount);

    return angle::Result::Continue();
}

void RendererVk::flushGpuEvents(double nextSyncGpuTimestampS, double nextSyncCpuTimestampS)
{
    if (mGpuEvents.size() == 0)
    {
        return;
    }

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    // Find the slope of the clock drift for adjustment.
    double lastGpuSyncTimeS  = mGpuClockSync.gpuTimestampS;
    double lastGpuSyncDiffS  = mGpuClockSync.cpuTimestampS - mGpuClockSync.gpuTimestampS;
    double gpuSyncDriftSlope = 0;

    double nextGpuSyncTimeS = nextSyncGpuTimestampS;
    double nextGpuSyncDiffS = nextSyncCpuTimestampS - nextSyncGpuTimestampS;

    // No GPU trace events should have been generated before the clock sync, so if there is no
    // "previous" clock sync, there should be no GPU events (i.e. the function early-outs above).
    ASSERT(mGpuClockSync.gpuTimestampS != std::numeric_limits<double>::max() &&
           mGpuClockSync.cpuTimestampS != std::numeric_limits<double>::max());
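    // The slope measures how fast the CPU-GPU clock offset changes per unit of GPU time; each
    // event below is then corrected by the offset extrapolated linearly to its own timestamp.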

    gpuSyncDriftSlope =
        (nextGpuSyncDiffS - lastGpuSyncDiffS) / (nextGpuSyncTimeS - lastGpuSyncTimeS);

    for (const GpuEvent &event : mGpuEvents)
    {
        double gpuTimestampS =
            (event.gpuTimestampCycles - mGpuEventTimestampOrigin) *
            static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) * 1e-9;

        // Account for clock drift.
        gpuTimestampS += lastGpuSyncDiffS + gpuSyncDriftSlope * (gpuTimestampS - lastGpuSyncTimeS);

        // Generate the trace now that the GPU timestamp is available and clock drifts are
        // accounted for.
        static long long eventId = 1;
        static const unsigned char *categoryEnabled =
            TRACE_EVENT_API_GET_CATEGORY_ENABLED("gpu.angle.gpu");
        platform->addTraceEvent(platform, event.phase, categoryEnabled, event.name, eventId++,
                                gpuTimestampS, 0, nullptr, nullptr, nullptr, TRACE_EVENT_FLAG_NONE);
    }

    mGpuEvents.clear();
}

uint32_t GetUniformBufferDescriptorCount()
{
    return kUniformBufferDescriptorsPerDescriptorSet;
}

}  // namespace rx