//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/system_utils.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "platform/Platform.h"

#include "third_party/trace_event/trace_event.h"

// Consts
namespace
{
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
constexpr size_t kInFlightCommandsLimit = 100u;
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
}  // anonymous namespace

namespace rx
{

namespace
{
// We currently only allocate 2 uniform buffers per descriptor set: one for the fragment shader
// and one for the vertex shader.
constexpr size_t kUniformBufferDescriptorsPerDescriptorSet = 2;
// Update the pipeline cache every this many swaps (if 60fps, this means every 10 minutes)
static constexpr uint32_t kPipelineCacheVkUpdatePeriod = 10 * 60 * 60;
// Wait a maximum of 10s. If that times out, we declare it a failure.
static constexpr uint64_t kMaxFenceWaitTimeNs = 10'000'000'000llu;

bool ShouldEnableMockICD(const egl::AttributeMap &attribs)
{
#if !defined(ANGLE_PLATFORM_ANDROID)
    // Mock ICD does not currently run on Android
    return (attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
                        EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE) ==
            EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE);
#else
    return false;
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
}

VkResult VerifyExtensionsPresent(const std::vector<VkExtensionProperties> &extensionProps,
                                 const std::vector<const char *> &enabledExtensionNames)
{
    // Compile the extension names into a set.
    std::set<std::string> extensionNames;
    for (const auto &extensionProp : extensionProps)
    {
        extensionNames.insert(extensionProp.extensionName);
    }

    for (const char *extensionName : enabledExtensionNames)
    {
        if (extensionNames.count(extensionName) == 0)
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }

    return VK_SUCCESS;
}

// Array of validation error/warning messages that will be ignored. Each entry should reference
// the associated bug ID.
constexpr std::array<const char *, 1> kSkippedMessages = {
    // http://anglebug.com/2796
    " [ UNASSIGNED-CoreValidation-Shader-PointSizeMissing ] Object: VK_NULL_HANDLE (Type = 19) "
    "| Pipeline topology is set to POINT_LIST, but PointSize is not written to in the shader "
    "corresponding to VK_SHADER_STAGE_VERTEX_BIT."};

// Suppresses known validation errors. Returns "true" if the given message is known (and should
// be ignored), "false" otherwise.
bool IsIgnoredDebugMessage(const char *message)
{
    for (const auto &msg : kSkippedMessages)
    {
        if (strcmp(msg, message) == 0)
        {
            return true;
        }
    }
    return false;
}

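// Callback for VK_EXT_debug_report. Per the extension, returning VK_TRUE asks the validation
// layer to abort the Vulkan call that triggered the report; we only do that for errors, and only
// in debug builds.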
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if (IsIgnoredDebugMessage(message))
    {
        return VK_FALSE;
    }
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}

// If we're loading the validation layers, we could be running from any random directory.
// Change to the executable directory so we can find the layers, then change back to the
// previous directory so we don't disrupt the application.
class ScopedVkLoaderEnvironment : angle::NonCopyable
{
  public:
    ScopedVkLoaderEnvironment(bool enableValidationLayers, bool enableMockICD)
        : mEnableValidationLayers(enableValidationLayers),
          mEnableMockICD(enableMockICD),
          mChangedCWD(false),
          mChangedICDPath(false)
    {
// Changing CWD and setting environment variables makes no sense on Android,
// since this code is a part of Java application there.
// Android Vulkan loader doesn't need this either.
#if !defined(ANGLE_PLATFORM_ANDROID)
        if (enableMockICD)
        {
            // Override environment variable to use built Mock ICD
            // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
            mPreviousICDPath = angle::GetEnvironmentVar(g_VkICDPathEnv);
            mChangedICDPath = angle::SetEnvironmentVar(g_VkICDPathEnv, ANGLE_VK_ICD_JSON);
            if (!mChangedICDPath)
            {
                ERR() << "Error setting Path for Mock/Null Driver.";
                mEnableMockICD = false;
            }
        }
        if (mEnableValidationLayers || mEnableMockICD)
        {
            const auto &cwd = angle::GetCWD();
            if (!cwd.valid())
            {
                ERR() << "Error getting CWD for Vulkan layers init.";
                mEnableValidationLayers = false;
                mEnableMockICD = false;
            }
            else
            {
                mPreviousCWD = cwd.value();
                const char *exeDir = angle::GetExecutableDirectory();
                mChangedCWD = angle::SetCWD(exeDir);
                if (!mChangedCWD)
                {
                    ERR() << "Error setting CWD for Vulkan layers init.";
                    mEnableValidationLayers = false;
                    mEnableMockICD = false;
                }
            }
        }

        // Override environment variable to use the ANGLE layers.
        if (mEnableValidationLayers)
        {
            if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_DATA_DIR))
            {
                ERR() << "Error setting environment for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
        }
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
    }

    ~ScopedVkLoaderEnvironment()
    {
        if (mChangedCWD)
        {
#if !defined(ANGLE_PLATFORM_ANDROID)
            ASSERT(mPreviousCWD.valid());
            angle::SetCWD(mPreviousCWD.value().c_str());
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
        }
        if (mChangedICDPath)
        {
            if (mPreviousICDPath.value().empty())
            {
                angle::UnsetEnvironmentVar(g_VkICDPathEnv);
            }
            else
            {
                angle::SetEnvironmentVar(g_VkICDPathEnv, mPreviousICDPath.value().c_str());
            }
        }
    }

    bool canEnableValidationLayers() const { return mEnableValidationLayers; }

    bool canEnableMockICD() const { return mEnableMockICD; }

  private:
    bool mEnableValidationLayers;
    bool mEnableMockICD;
    bool mChangedCWD;
    Optional<std::string> mPreviousCWD;
    bool mChangedICDPath;
    Optional<std::string> mPreviousICDPath;
};

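// Picks the physical device to use. If the mock ICD was requested, look for the mock device by
// its well-known vendor/device IDs and name; otherwise (or if it is not found) fall back to the
// first enumerated device.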
void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
                          bool preferMockICD,
                          VkPhysicalDevice *physicalDeviceOut,
                          VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
{
    ASSERT(!physicalDevices.empty());
    if (preferMockICD)
    {
        for (const VkPhysicalDevice &physicalDevice : physicalDevices)
        {
            vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
            if ((kMockVendorID == physicalDevicePropertiesOut->vendorID) &&
                (kMockDeviceID == physicalDevicePropertiesOut->deviceID) &&
                (strcmp(kMockDeviceName, physicalDevicePropertiesOut->deviceName) == 0))
            {
                *physicalDeviceOut = physicalDevice;
                return;
            }
        }
        WARN() << "Vulkan Mock Driver was requested but Mock Device was not found. Using default "
                  "physicalDevice instead.";
    }

    // Fall back to first device.
    *physicalDeviceOut = physicalDevices[0];
    vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
}

// Initially dumping the command graphs is disabled.
constexpr bool kEnableCommandGraphDiagnostics = false;

bool ExtensionFound(const char *extensionName,
                    const std::vector<VkExtensionProperties> &extensionProps)
{
    for (const auto &extensionProp : extensionProps)
    {
        if (strcmp(extensionProp.extensionName, extensionName) == 0)
        {
            return true;
        }
    }
    return false;
}
}  // anonymous namespace

// CommandBatch implementation.
RendererVk::CommandBatch::CommandBatch() = default;

RendererVk::CommandBatch::~CommandBatch() = default;

RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{}

RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}

void RendererVk::CommandBatch::destroy(VkDevice device)
{
    commandPool.destroy(device);
    fence.destroy(device);
}

// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mFeaturesInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableMockICD(false),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mCommandGraph(kEnableCommandGraphDiagnostics),
      mGpuEventsEnabled(false),
      mGpuClockSync{std::numeric_limits<double>::max(), std::numeric_limits<double>::max()},
      mGpuEventTimestampOrigin(0)
{
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);
}

RendererVk::~RendererVk() {}

void RendererVk::onDestroy(vk::Context *context)
{
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        (void)finish(context);
    }

    mUtils.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);
    mPipelineCache.destroy(mDevice);
    mSubmitSemaphorePool.destroy(mDevice);
    mShaderLibrary.destroy(mDevice);
    mGpuEventQueryPool.destroy(mDevice);

    GlslangWrapper::Release();

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugReportCallback)
    {
        ASSERT(mInstance);
        auto destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkDestroyDebugReportCallbackEXT"));
        ASSERT(destroyDebugReportCallback);
        destroyDebugReportCallback(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}

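// Called when the Vulkan device is reported lost. Pending work in the command graph is discarded,
// in-flight resources are freed, and the EGL display is notified so the application can recreate
// its contexts.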
void RendererVk::notifyDeviceLost()
{
    mDeviceLost = true;

    mCommandGraph.clear();
    mLastSubmittedQueueSerial = mCurrentQueueSerial;
    mCurrentQueueSerial = mQueueSerialFactory.generate();
    freeAllInFlightResources();

    mDisplay->notifyDeviceLost();
}

bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}

angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiName)
{
    mDisplay = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs),
                                                ShouldEnableMockICD(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnableMockICD = scopedEnvironment.canEnableMockICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount = 0;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(
            instanceLayerProps, layersRequested, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiName);

    // TODO(jmadill): Should be able to continue initialization if debug report ext missing.
    if (mEnableValidationLayers)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionProps, enabledInstanceExtensions));

    VkApplicationInfo applicationInfo = {};
    applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName = "ANGLE";
    applicationInfo.engineVersion = 1;

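    // vkEnumerateInstanceVersion is a global-level command that only exists in Vulkan 1.1+
    // loaders, so it is queried through vkGetInstanceProcAddr with a null instance handle
    // (mInstance has not been created yet). If the entry point is absent, assume a 1.0 loader.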
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
    if (!enumerateInstanceVersion)
    {
        applicationInfo.apiVersion = VK_API_VERSION_1_0;
    }
    else
    {
        uint32_t apiVersion = VK_API_VERSION_1_0;
        ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
        if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
        {
            // Note: will need to revisit this with Vulkan 1.2+.
            applicationInfo.apiVersion = VK_API_VERSION_1_1;
        }
        else
        {
            applicationInfo.apiVersion = VK_API_VERSION_1_0;
        }
    }

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags = 0;
    instanceInfo.pApplicationInfo = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount = enabledLayerCount;
    instanceInfo.ppEnabledLayerNames = enabledLayerNames;

    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));

    if (mEnableValidationLayers)
    {
        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
                                VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData = this;

        auto createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
            vkGetInstanceProcAddr(mInstance, "vkCreateDebugReportCallbackEXT"));
        ASSERT(createDebugReportCallback);
        ANGLE_VK_TRY(displayVk, createDebugReportCallback(mInstance, &debugReportInfo, nullptr,
                                                          &mDebugReportCallback));
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnableMockICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount = 0;
    uint32_t firstGraphicsQueueFamily = 0;
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            break;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    GlslangWrapper::Initialize();

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    return angle::Result::Continue;
}

angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    const char *const *enabledLayerNames = nullptr;
    uint32_t enabledLayerCount = 0;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers = GetAvailableValidationLayers(
            deviceLayerProps, false, &enabledLayerNames, &enabledLayerCount);
    }

    std::vector<const char *> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    initFeatures(deviceExtensionProps);
    mFeaturesInitialized = true;

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if ((getFeatures().flipViewportY) &&
        (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }
    if (getFeatures().supportsIncrementalPresent)
    {
        enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionProps, enabledDeviceExtensions));

    // Select additional features to be enabled
    VkPhysicalDeviceFeatures enabledFeatures = {};
    enabledFeatures.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;

    VkDeviceQueueCreateInfo queueCreateInfo = {};

    float zeroPriority = 0.0f;

    queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo = {};

    createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos = &queueCreateInfo;
    createInfo.enabledLayerCount = enabledLayerCount;
    createInfo.ppEnabledLayerNames = enabledLayerNames;
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    createInfo.pEnabledFeatures = &enabledFeatures;

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo = {};
    commandPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;

    ANGLE_VK_TRY(displayVk, mCommandPool.init(mDevice, commandPoolInfo));

    // Initialize the vulkan pipeline cache.
    ANGLE_TRY(initPipelineCache(displayVk));

    // Initialize the submission semaphore pool.
    ANGLE_TRY(mSubmitSemaphorePool.init(displayVk, vk::kDefaultSemaphorePoolSize));

#if ANGLE_ENABLE_VULKAN_GPU_TRACE_EVENTS
    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    // GPU tracing workaround for anglebug.com/2927. The renderer should not emit gpu events
    // during platform discovery.
    const unsigned char *gpuEventsEnabled =
        platform->getTraceCategoryEnabledFlag(platform, "gpu.angle.gpu");
    mGpuEventsEnabled = gpuEventsEnabled && *gpuEventsEnabled;
#endif

    if (mGpuEventsEnabled)
    {
        // Calculate the difference between CPU and GPU clocks for GPU event reporting.
        ANGLE_TRY(mGpuEventQueryPool.init(displayVk, VK_QUERY_TYPE_TIMESTAMP,
                                          vk::kDefaultTimestampQueryPoolSize));
        ANGLE_TRY(synchronizeCpuGpuTime(displayVk));
    }

    return angle::Result::Continue;
}

angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue;
        }
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue;
}

std::string RendererVk::getVendorString() const
{
    return GetVendorString(mPhysicalDeviceProperties.vendorID);
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(";

    // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so
    // that Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not
    // be needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
    // driver detection.
    if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
    {
        strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
    }

    strstr << mPhysicalDeviceProperties.deviceName;
    strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";

    strstr << ")";

    return strstr.str();
}

gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Declare GLES2 support if necessary features for GLES3 are missing
    bool necessaryFeaturesForES3 = mPhysicalDeviceFeatures.inheritedQueries;

    if (!necessaryFeaturesForES3)
    {
        return gl::Version(2, 0);
    }

    return gl::Version(3, 0);
}

void RendererVk::initFeatures(const std::vector<VkExtensionProperties> &deviceExtensionProps)
{
// Use OpenGL line rasterization rules by default.
// TODO(jmadill): Fix Android support. http://anglebug.com/2830
#if defined(ANGLE_PLATFORM_ANDROID)
    mFeatures.basicGLLineRasterization = false;
#else
    mFeatures.basicGLLineRasterization = true;
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if ((mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
        ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionProps))
    {
        // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
        // investigation. http://anglebug.com/2728
        mFeatures.flipViewportY = !IsIntel(mPhysicalDeviceProperties.vendorID);
    }

#ifdef ANGLE_PLATFORM_WINDOWS
    // http://anglebug.com/2838
    mFeatures.extraCopyBufferRegion = IsIntel(mPhysicalDeviceProperties.vendorID);
#endif

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    platform->overrideFeaturesVk(platform, &mFeatures);

    // Work around incorrect NVIDIA point size range clamping.
    // TODO(jmadill): Narrow driver range once fixed. http://anglebug.com/2970
    if (IsNvidia(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.clampPointSize = true;
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    // Work around ineffective compute-graphics barriers on Nexus 5X.
    // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
    // http://anglebug.com/3019
    mFeatures.flushAfterVertexConversion =
        IsNexus5X(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
#endif

    if (ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionProps))
    {
        mFeatures.supportsIncrementalPresent = true;
    }
}

void RendererVk::initPipelineCacheVkKey()
{
    std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
    // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
    // cache. It's not particularly necessary to write it as a hex number as done here, so long as
    // there is no '\0' in the result.
    for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
    {
        hashStream << std::hex << c;
    }
    // Add the vendor and device id too for good measure.
    hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
    hashStream << std::hex << mPhysicalDeviceProperties.deviceID;

    const std::string &hashString = hashStream.str();
    angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
                               hashString.length(), mPipelineCacheVkBlobKey.data());
}

angle::Result RendererVk::initPipelineCache(DisplayVk *display)
{
    initPipelineCacheVkKey();

    egl::BlobCache::Value initialData;
    bool success = display->getBlobCache()->get(display->getScratchBuffer(),
                                                mPipelineCacheVkBlobKey, &initialData);

    VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};

    pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCacheCreateInfo.flags = 0;
    pipelineCacheCreateInfo.initialDataSize = success ? initialData.size() : 0;
    pipelineCacheCreateInfo.pInitialData = success ? initialData.data() : nullptr;

    ANGLE_VK_TRY(display, mPipelineCache.init(mDevice, pipelineCacheCreateInfo));
    return angle::Result::Continue;
}

void RendererVk::ensureCapsInitialized() const
{
    if (!mCapsInitialized)
    {
        ASSERT(mCurrentQueueFamilyIndex < mQueueFamilyProperties.size());
        vk::GenerateCaps(mPhysicalDeviceProperties, mPhysicalDeviceFeatures,
                         mQueueFamilyProperties[mCurrentQueueFamilyIndex], mNativeTextureCaps,
                         &mNativeCaps, &mNativeExtensions, &mNativeLimitations);
        mCapsInitialized = true;
    }
}

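// Collects the semaphores the next queue submission must wait on: the semaphore signaled by the
// previous submission (if any) plus any accumulated wait semaphores, returning each one to the
// semaphore pool once it has been recorded.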
void RendererVk::getSubmitWaitSemaphores(
    vk::Context *context,
    angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> *waitSemaphores,
    angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> *waitStageMasks)
{
    if (mSubmitLastSignaledSemaphore.getSemaphore())
    {
        waitSemaphores->push_back(mSubmitLastSignaledSemaphore.getSemaphore()->getHandle());
        waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

        // Return the semaphore to the pool (which will remain valid and unused until the
        // queue it's about to be waited on has finished execution).
        mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);
    }

    for (vk::SemaphoreHelper &semaphore : mSubmitWaitSemaphores)
    {
        waitSemaphores->push_back(semaphore.getSemaphore()->getHandle());
        waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);

        mSubmitSemaphorePool.freeSemaphore(context, &semaphore);
    }
    mSubmitWaitSemaphores.clear();
}

const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}

uint32_t RendererVk::getMaxActiveTextures()
{
    // TODO(lucferron): expose this limitation to GL in Context Caps
    return std::min<uint32_t>(mPhysicalDeviceProperties.limits.maxPerStageDescriptorSamplers,
                              gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES);
}

const vk::CommandPool &RendererVk::getCommandPool() const
{
    return mCommandPool;
}

angle::Result RendererVk::finish(vk::Context *context)
{
    if (!mCommandGraph.empty())
    {
        TRACE_EVENT0("gpu.angle", "RendererVk::finish");

        vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
        ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));

        angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
        angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
        getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);

        VkSubmitInfo submitInfo = {};
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
        submitInfo.pWaitSemaphores = waitSemaphores.data();
        submitInfo.pWaitDstStageMask = waitStageMasks.data();
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = commandBatch.get().ptr();
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;

        ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBatch.get())));
    }

    ASSERT(mQueue != VK_NULL_HANDLE);
    ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueue));
    freeAllInFlightResources();

    if (mGpuEventsEnabled)
    {
        // This loop should in practice execute once since the queue is already idle.
        while (mInFlightGpuEventQueries.size() > 0)
        {
            ANGLE_TRY(checkCompletedGpuEvents(context));
        }
        // Recalculate the CPU/GPU time difference to account for clock drifting. Avoid unnecessary
        // synchronization if there is no event to be adjusted (happens when finish() gets called
        // multiple times towards the end of the application).
        if (mGpuEvents.size() > 0)
        {
            ANGLE_TRY(synchronizeCpuGpuTime(context));
        }
    }

    return angle::Result::Continue;
}

void RendererVk::freeAllInFlightResources()
{
    for (CommandBatch &batch : mInFlightCommands)
    {
        // On device loss we need to wait for fence to be signaled before destroying it
        if (mDeviceLost)
        {
            VkResult status = batch.fence.wait(mDevice, kMaxFenceWaitTimeNs);
            // If wait times out, it is probably not possible to recover from lost device
            ASSERT(status == VK_SUCCESS || status == VK_ERROR_DEVICE_LOST);
        }
        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
    }
    mInFlightCommands.clear();

    for (auto &garbage : mGarbage)
    {
        garbage.destroy(mDevice);
    }
    mGarbage.clear();

    mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
}

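// Walks the in-flight command batches in submission order, retiring every batch whose fence has
// already signaled, and then destroys any garbage whose associated serial has completed.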
angle::Result RendererVk::checkCompletedCommands(vk::Context *context)
{
    int finishedCount = 0;

    for (CommandBatch &batch : mInFlightCommands)
    {
        VkResult result = batch.fence.getStatus(mDevice);
        if (result == VK_NOT_READY)
        {
            break;
        }
        ANGLE_VK_TRY(context, result);

        ASSERT(batch.serial > mLastCompletedQueueSerial);
        mLastCompletedQueueSerial = batch.serial;

        batch.fence.destroy(mDevice);
        batch.commandPool.destroy(mDevice);
        ++finishedCount;
    }

    mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);

    size_t freeIndex = 0;
    for (; freeIndex < mGarbage.size(); ++freeIndex)
    {
        if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
            break;
    }

    // Remove the entries from the garbage list - they should be ready to go.
    if (freeIndex > 0)
    {
        mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
    }

    return angle::Result::Continue;
}

angle::Result RendererVk::submitFrame(vk::Context *context,
                                      const VkSubmitInfo &submitInfo,
                                      vk::CommandBuffer &&commandBuffer)
{
    TRACE_EVENT0("gpu.angle", "RendererVk::submitFrame");
    VkFenceCreateInfo fenceInfo = {};
    fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fenceInfo.flags = 0;

    vk::Scoped<CommandBatch> scopedBatch(mDevice);
    CommandBatch &batch = scopedBatch.get();
    ANGLE_VK_TRY(context, batch.fence.init(mDevice, fenceInfo));

    ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));

    // Store this command buffer in the in-flight list.
    batch.commandPool = std::move(mCommandPool);
    batch.serial = mCurrentQueueSerial;

    mInFlightCommands.emplace_back(scopedBatch.release());

    // The CPU should be throttled to keep mInFlightCommands from growing too fast. That is done
    // on swap() though, and there could be multiple submissions in between (through glFlush()
    // calls), so the limit is larger than the expected number of images. The
    // InterleavedAttributeDataBenchmark perf test for example issues a large number of flushes.
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001103 ASSERT(mInFlightCommands.size() <= kInFlightCommandsLimit);
Jamie Madill0c0dc342017-03-24 14:18:51 -04001104
1105 // Increment the queue serial. If this fails, we should restart ANGLE.
Jamie Madillfb05bcb2017-06-07 15:43:18 -04001106 // TODO(jmadill): Overflow check.
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001107 mLastSubmittedQueueSerial = mCurrentQueueSerial;
Jamie Madillb980c562018-11-27 11:34:27 -05001108 mCurrentQueueSerial = mQueueSerialFactory.generate();
Jamie Madill0c0dc342017-03-24 14:18:51 -04001109
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001110 ANGLE_TRY(checkCompletedCommands(context));
Jamie Madill0c0dc342017-03-24 14:18:51 -04001111
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001112 if (mGpuEventsEnabled)
1113 {
1114 ANGLE_TRY(checkCompletedGpuEvents(context));
1115 }
1116
Jamie Madill49ac74b2017-12-21 14:42:33 -05001117 // Simply null out the command buffer here - it was allocated using the command pool.
1118 commandBuffer.releaseHandle();
1119
1120 // Reallocate the command pool for next frame.
1121 // TODO(jmadill): Consider reusing command pools.
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001122 VkCommandPoolCreateInfo poolInfo = {};
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001123 poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001124 poolInfo.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001125 poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001126
Yuly Novikov27780292018-11-09 11:19:49 -05001127 ANGLE_VK_TRY(context, mCommandPool.init(mDevice, poolInfo));
Jamie Madill7c985f52018-11-29 18:16:17 -05001128 return angle::Result::Continue;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001129}
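
// For reference, a minimal sketch (hypothetical; the real CommandBatch is declared in the
// renderer's header and may differ) of the per-submission state tracked above. The command pool
// is moved into the batch so that it is destroyed, together with its command buffers, only once
// the batch's fence has signaled in checkCompletedCommands().
namespace
{
struct CommandBatchSketch
{
    vk::CommandPool commandPool;  // Owns the command buffers submitted with this batch.
    vk::Fence fence;              // Signaled by the GPU when the submission completes.
    Serial serial;                // Queue serial assigned to this submission.
};
}  // anonymous namespace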
1130
Jamie Madillaaca96e2018-06-12 10:19:48 -04001131bool RendererVk::isSerialInUse(Serial serial) const
Jamie Madill97760352017-11-09 13:08:29 -05001132{
1133 return serial > mLastCompletedQueueSerial;
1134}
1135
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001136angle::Result RendererVk::finishToSerial(vk::Context *context, Serial serial)
1137{
1138 if (!isSerialInUse(serial) || mInFlightCommands.empty())
1139 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001140 return angle::Result::Continue;
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001141 }
1142
1143    // Find the first batch with a serial equal to or bigger than the given serial (note that
1144    // the batch serials are unique, otherwise upper-bound would have been necessary).
1145 size_t batchIndex = mInFlightCommands.size() - 1;
1146 for (size_t i = 0; i < mInFlightCommands.size(); ++i)
1147 {
1148 if (mInFlightCommands[i].serial >= serial)
1149 {
1150 batchIndex = i;
1151 break;
1152 }
1153 }
1154 const CommandBatch &batch = mInFlightCommands[batchIndex];
1155
1156    // Wait for it to finish.
Yuly Novikov27780292018-11-09 11:19:49 -05001157 ANGLE_VK_TRY(context, batch.fence.wait(mDevice, kMaxFenceWaitTimeNs));
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001158
1159 // Clean up finished batches.
1160 return checkCompletedCommands(context);
1161}
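
// A minimal sketch, for illustration only, of the equivalent search expressed with
// std::lower_bound. It assumes <algorithm> is available and that the in-flight batches form a
// random-access container ordered by strictly increasing serial, as noted in finishToSerial().
namespace
{
template <typename BatchVector>
size_t FindBatchIndexSketch(const BatchVector &inFlightCommands, Serial serial)
{
    auto iter = std::lower_bound(
        inFlightCommands.begin(), inFlightCommands.end(), serial,
        [](const auto &batch, Serial value) { return batch.serial < value; });
    if (iter == inFlightCommands.end())
    {
        // No batch has a serial at or beyond the requested one; fall back to the newest batch.
        return inFlightCommands.size() - 1;
    }
    return static_cast<size_t>(iter - inFlightCommands.begin());
}
}  // anonymous namespace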
1162
Jamie Madill21061022018-07-12 23:56:30 -04001163angle::Result RendererVk::getCompatibleRenderPass(vk::Context *context,
1164 const vk::RenderPassDesc &desc,
1165 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -05001166{
Jamie Madill21061022018-07-12 23:56:30 -04001167 return mRenderPassCache.getCompatibleRenderPass(context, mCurrentQueueSerial, desc,
Jamie Madill9f2a8612017-11-30 12:43:09 -05001168 renderPassOut);
1169}
1170
Jamie Madill21061022018-07-12 23:56:30 -04001171angle::Result RendererVk::getRenderPassWithOps(vk::Context *context,
1172 const vk::RenderPassDesc &desc,
1173 const vk::AttachmentOpsArray &ops,
1174 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -05001175{
Jamie Madill21061022018-07-12 23:56:30 -04001176 return mRenderPassCache.getRenderPassWithOps(context, mCurrentQueueSerial, desc, ops,
Jamie Madillbef918c2017-12-13 13:11:30 -05001177 renderPassOut);
Jamie Madill9f2a8612017-11-30 12:43:09 -05001178}
1179
Jamie Madilla5e06072018-05-18 14:36:05 -04001180vk::CommandGraph *RendererVk::getCommandGraph()
Jamie Madill49ac74b2017-12-21 14:42:33 -05001181{
Jamie Madilla5e06072018-05-18 14:36:05 -04001182 return &mCommandGraph;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001183}
1184
Jamie Madill21061022018-07-12 23:56:30 -04001185angle::Result RendererVk::flushCommandGraph(vk::Context *context, vk::CommandBuffer *commandBatch)
Jamie Madill49ac74b2017-12-21 14:42:33 -05001186{
Jamie Madill21061022018-07-12 23:56:30 -04001187 return mCommandGraph.submitCommands(context, mCurrentQueueSerial, &mRenderPassCache,
Jamie Madill1f46bc12018-02-20 16:09:43 -05001188 &mCommandPool, commandBatch);
Jamie Madill49ac74b2017-12-21 14:42:33 -05001189}
1190
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001191angle::Result RendererVk::flush(vk::Context *context)
Jamie Madill49ac74b2017-12-21 14:42:33 -05001192{
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001193 if (mCommandGraph.empty())
1194 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001195 return angle::Result::Continue;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001196 }
1197
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001198 TRACE_EVENT0("gpu.angle", "RendererVk::flush");
1199
Jamie Madillbea35a62018-07-05 11:54:10 -04001200 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1201 ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001202
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001203 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001204 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
1205 getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001206
1207 // On every flush, create a semaphore to be signaled. On the next submission, this semaphore
1208 // will be waited on.
1209 ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &mSubmitLastSignaledSemaphore));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001210
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001211 VkSubmitInfo submitInfo = {};
Jamie Madill49ac74b2017-12-21 14:42:33 -05001212 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001213 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
1214 submitInfo.pWaitSemaphores = waitSemaphores.data();
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001215 submitInfo.pWaitDstStageMask = waitStageMasks.data();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001216 submitInfo.commandBufferCount = 1;
Jamie Madillbea35a62018-07-05 11:54:10 -04001217 submitInfo.pCommandBuffers = commandBatch.get().ptr();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001218 submitInfo.signalSemaphoreCount = 1;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001219 submitInfo.pSignalSemaphores = mSubmitLastSignaledSemaphore.getSemaphore()->ptr();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001220
Jamie Madill21061022018-07-12 23:56:30 -04001221 ANGLE_TRY(submitFrame(context, submitInfo, commandBatch.release()));
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001222
Jamie Madill7c985f52018-11-29 18:16:17 -05001223 return angle::Result::Continue;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001224}
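
// For reference, the wait/signal pairing above produces a simple chain between consecutive
// submissions, which keeps them ordered on the GPU even though they use separate command
// buffers. Illustration only (semaphore names are made up):
//
//   submit #1:  wait {}      signal {S1}
//   submit #2:  wait {S1}    signal {S2}
//   submit #3:  wait {S2}    signal {S3}
//
// Any additional wait semaphores gathered by getSubmitWaitSemaphores() are appended to the wait
// list of that submission in the same way.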
1225
Jamie Madill78feddc2018-04-27 11:45:05 -04001226Serial RendererVk::issueShaderSerial()
Jamie Madillf2f6d372018-01-10 21:37:23 -05001227{
Jamie Madill78feddc2018-04-27 11:45:05 -04001228 return mShaderSerialFactory.generate();
Jamie Madillf2f6d372018-01-10 21:37:23 -05001229}
1230
Jamie Madill21061022018-07-12 23:56:30 -04001231angle::Result RendererVk::getDescriptorSetLayout(
1232 vk::Context *context,
Jamie Madill9b168d02018-06-13 13:25:32 -04001233 const vk::DescriptorSetLayoutDesc &desc,
1234 vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
1235{
Jamie Madill21061022018-07-12 23:56:30 -04001236 return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
Jamie Madill9b168d02018-06-13 13:25:32 -04001237}
1238
Jamie Madill21061022018-07-12 23:56:30 -04001239angle::Result RendererVk::getPipelineLayout(
1240 vk::Context *context,
Jamie Madill9b168d02018-06-13 13:25:32 -04001241 const vk::PipelineLayoutDesc &desc,
1242 const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
1243 vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
1244{
Jamie Madill21061022018-07-12 23:56:30 -04001245 return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
Jamie Madill9b168d02018-06-13 13:25:32 -04001246 pipelineLayoutOut);
1247}
1248
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001249angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
1250{
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001251 ASSERT(mPipelineCache.valid());
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001252
1253 if (--mPipelineCacheVkUpdateTimeout > 0)
1254 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001255 return angle::Result::Continue;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001256 }
1257
1258 mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
1259
1260 // Get the size of the cache.
1261 size_t pipelineCacheSize = 0;
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001262 VkResult result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, nullptr);
Yuly Novikov27780292018-11-09 11:19:49 -05001263 if (result != VK_INCOMPLETE)
1264 {
1265 ANGLE_VK_TRY(displayVk, result);
1266 }
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001267
1268 angle::MemoryBuffer *pipelineCacheData = nullptr;
1269 ANGLE_VK_CHECK_ALLOC(displayVk,
1270 displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));
1271
1272 size_t originalPipelineCacheSize = pipelineCacheSize;
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001273 result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001274    // Note: currently we don't accept VK_INCOMPLETE as we don't expect it (the full size of the
1275    // cache was determined just above), so receiving it hints at an implementation bug we would
1276    // want to know about early.
Yuly Novikov27780292018-11-09 11:19:49 -05001277 ASSERT(result != VK_INCOMPLETE);
1278 ANGLE_VK_TRY(displayVk, result);
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001279
1280 // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
1281 // the buffer to avoid leaking garbage memory.
1282 ASSERT(pipelineCacheSize <= originalPipelineCacheSize);
1283 if (pipelineCacheSize < originalPipelineCacheSize)
1284 {
1285 memset(pipelineCacheData->data() + pipelineCacheSize, 0,
1286 originalPipelineCacheSize - pipelineCacheSize);
1287 }
1288
1289 displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
1290
Jamie Madill7c985f52018-11-29 18:16:17 -05001291 return angle::Result::Continue;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001292}
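
// A minimal sketch, for illustration only, of how a blob saved above could later be used to seed
// a new pipeline cache (for example at renderer initialization). The helper name and the use of
// angle::MemoryBuffer here are assumptions, not the actual initialization path.
namespace
{
VkResult CreateSeededPipelineCacheSketch(VkDevice device,
                                         const angle::MemoryBuffer &cachedBlob,
                                         VkPipelineCache *pipelineCacheOut)
{
    VkPipelineCacheCreateInfo cacheCreateInfo = {};
    cacheCreateInfo.sType                     = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    // The blob is expected to have been produced by vkGetPipelineCacheData on a compatible
    // device; an empty blob simply yields an empty cache.
    cacheCreateInfo.initialDataSize = cachedBlob.size();
    cacheCreateInfo.pInitialData    = cachedBlob.data();
    return vkCreatePipelineCache(device, &cacheCreateInfo, nullptr, pipelineCacheOut);
}
}  // anonymous namespace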
1293
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001294angle::Result RendererVk::allocateSubmitWaitSemaphore(vk::Context *context,
1295 const vk::Semaphore **outSemaphore)
1296{
1297 ASSERT(mSubmitWaitSemaphores.size() < mSubmitWaitSemaphores.max_size());
1298
1299 vk::SemaphoreHelper semaphore;
1300 ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &semaphore));
1301
1302 mSubmitWaitSemaphores.push_back(std::move(semaphore));
1303 *outSemaphore = mSubmitWaitSemaphores.back().getSemaphore();
1304
Jamie Madill7c985f52018-11-29 18:16:17 -05001305 return angle::Result::Continue;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001306}
1307
1308const vk::Semaphore *RendererVk::getSubmitLastSignaledSemaphore(vk::Context *context)
1309{
1310 const vk::Semaphore *semaphore = mSubmitLastSignaledSemaphore.getSemaphore();
1311
1312    // Return the semaphore to the pool (it will remain valid and unused until the
1313    // submission that waits on it has finished execution). The caller is about
1314    // to wait on it.
1315 mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);
1316
1317 return semaphore;
1318}
1319
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001320angle::Result RendererVk::getTimestamp(vk::Context *context, uint64_t *timestampOut)
1321{
1322 // The intent of this function is to query the timestamp without stalling the GPU. Currently,
1323 // that seems impossible, so instead, we are going to make a small submission with just a
1324 // timestamp query. First, the disjoint timer query extension says:
1325 //
1326 // > This will return the GL time after all previous commands have reached the GL server but
1327 // have not yet necessarily executed.
1328 //
1329 // The previous commands are stored in the command graph at the moment and are not yet flushed.
1330 // The wording allows us to make a submission to get the timestamp without performing a flush.
1331 //
1332 // Second:
1333 //
1334 // > By using a combination of this synchronous get command and the asynchronous timestamp query
1335 // object target, applications can measure the latency between when commands reach the GL server
1336 // and when they are realized in the framebuffer.
1337 //
1338    // This fits with the above strategy as well, although it may introduce a GPU bubble.  This
1339    // function directly generates a command buffer and submits it instead of
1340 // using the other member functions. This is to avoid changing any state, such as the queue
1341 // serial.
1342
1343 // Create a query used to receive the GPU timestamp
1344 vk::Scoped<vk::DynamicQueryPool> timestampQueryPool(mDevice);
1345 vk::QueryHelper timestampQuery;
1346 ANGLE_TRY(timestampQueryPool.get().init(context, VK_QUERY_TYPE_TIMESTAMP, 1));
1347 ANGLE_TRY(timestampQueryPool.get().allocateQuery(context, &timestampQuery));
1348
1349 // Record the command buffer
1350 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1351 vk::CommandBuffer &commandBuffer = commandBatch.get();
1352
1353 VkCommandBufferAllocateInfo commandBufferInfo = {};
1354 commandBufferInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1355 commandBufferInfo.commandPool = mCommandPool.getHandle();
1356 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1357 commandBufferInfo.commandBufferCount = 1;
1358
Yuly Novikov27780292018-11-09 11:19:49 -05001359 ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001360
1361 VkCommandBufferBeginInfo beginInfo = {};
1362 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1363 beginInfo.flags = 0;
1364 beginInfo.pInheritanceInfo = nullptr;
1365
Yuly Novikov27780292018-11-09 11:19:49 -05001366 ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001367
1368 commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
1369 timestampQuery.getQuery(), 1);
1370 commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1371 timestampQuery.getQueryPool()->getHandle(),
1372 timestampQuery.getQuery());
1373
Yuly Novikov27780292018-11-09 11:19:49 -05001374 ANGLE_VK_TRY(context, commandBuffer.end());
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001375
1376 // Create fence for the submission
1377 VkFenceCreateInfo fenceInfo = {};
1378 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1379 fenceInfo.flags = 0;
1380
1381 vk::Scoped<vk::Fence> fence(mDevice);
Yuly Novikov27780292018-11-09 11:19:49 -05001382 ANGLE_VK_TRY(context, fence.get().init(mDevice, fenceInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001383
1384 // Submit the command buffer
1385 VkSubmitInfo submitInfo = {};
1386 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1387 submitInfo.waitSemaphoreCount = 0;
1388 submitInfo.pWaitSemaphores = nullptr;
1389 submitInfo.pWaitDstStageMask = nullptr;
1390 submitInfo.commandBufferCount = 1;
1391 submitInfo.pCommandBuffers = commandBuffer.ptr();
1392 submitInfo.signalSemaphoreCount = 0;
1393 submitInfo.pSignalSemaphores = nullptr;
1394
1395 ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, fence.get().getHandle()));
1396
1397 // Wait for the submission to finish. Given no semaphores, there is hope that it would execute
1398 // in parallel with what's already running on the GPU.
Yuly Novikov27780292018-11-09 11:19:49 -05001399 ANGLE_VK_TRY(context, fence.get().wait(mDevice, kMaxFenceWaitTimeNs));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001400
1401 // Get the query results
1402 constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;
1403
Yuly Novikov27780292018-11-09 11:19:49 -05001404 ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
1405 mDevice, timestampQuery.getQuery(), 1, sizeof(*timestampOut),
1406 timestampOut, sizeof(*timestampOut), queryFlags));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001407
1408 timestampQueryPool.get().freeQuery(context, &timestampQuery);
1409
Jamie Madill7c985f52018-11-29 18:16:17 -05001410 return angle::Result::Continue;
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001411}
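
// The value written by getTimestamp() is in raw timer ticks. A minimal sketch, for illustration
// only, of converting such a value to nanoseconds; it assumes the caller has the physical device
// properties and the queue family's timestampValidBits at hand.
namespace
{
double TimestampTicksToNanosecondsSketch(const VkPhysicalDeviceProperties &properties,
                                         uint32_t queueTimestampValidBits,
                                         uint64_t timestampTicks)
{
    // Only the low timestampValidBits bits of the counter are meaningful
    // (VkQueueFamilyProperties::timestampValidBits); mask off the rest.
    if (queueTimestampValidBits < 64)
    {
        timestampTicks &= (uint64_t(1) << queueTimestampValidBits) - 1;
    }
    // limits.timestampPeriod gives the number of nanoseconds per tick.
    return static_cast<double>(timestampTicks) *
           static_cast<double>(properties.limits.timestampPeriod);
}
}  // anonymous namespace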
1412
Shahbaz Youssefi96bd8fd2018-11-30 14:30:18 -05001413// These functions check the mandatory format support first, and fall back to querying the device
1414// (if necessary) to test the availability of the bits.
1415bool RendererVk::hasLinearTextureFormatFeatureBits(VkFormat format,
1416 const VkFormatFeatureFlags featureBits)
1417{
1418 return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
1419}
1420
1421bool RendererVk::hasTextureFormatFeatureBits(VkFormat format,
1422 const VkFormatFeatureFlags featureBits)
1423{
1424 return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
1425}
1426
1427bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
1428{
1429 return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
1430}
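
// A small usage sketch of the helpers above (illustration only; the helper and the format choice
// are hypothetical, and this assumes the members are callable from here): pick a depth/stencil
// format based on optimal-tiling support, falling back when the preferred format is unavailable.
namespace
{
VkFormat ChooseDepthStencilFormatSketch(RendererVk *renderer)
{
    constexpr VkFormatFeatureFlags kRequiredBits = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
    if (renderer->hasTextureFormatFeatureBits(VK_FORMAT_D24_UNORM_S8_UINT, kRequiredBits))
    {
        return VK_FORMAT_D24_UNORM_S8_UINT;
    }
    // Per the Vulkan spec, at least one of D24_UNORM_S8_UINT and D32_SFLOAT_S8_UINT must support
    // depth/stencil attachment usage with optimal tiling.
    return VK_FORMAT_D32_SFLOAT_S8_UINT;
}
}  // anonymous namespace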
1431
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001432angle::Result RendererVk::synchronizeCpuGpuTime(vk::Context *context)
1433{
1434 ASSERT(mGpuEventsEnabled);
1435
1436 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1437 ASSERT(platform);
1438
1439 // To synchronize CPU and GPU times, we need to get the CPU timestamp as close as possible to
1440 // the GPU timestamp. The process of getting the GPU timestamp is as follows:
1441 //
1442 // CPU GPU
1443 //
1444 // Record command buffer
1445 // with timestamp query
1446 //
1447 // Submit command buffer
1448 //
1449 // Post-submission work Begin execution
1450 //
1451    //           ????                    Write timestamp Tgpu
1452 //
1453 // ???? End execution
1454 //
1455 // ???? Return query results
1456 //
1457 // ????
1458 //
1459 // Get query results
1460 //
1461 // The areas of unknown work (????) on the CPU indicate that the CPU may or may not have
1462 // finished post-submission work while the GPU is executing in parallel. With no further work,
1463 // querying CPU timestamps before submission and after getting query results give the bounds to
1464 // Tgpu, which could be quite large.
1465 //
1466 // Using VkEvents, the GPU can be made to wait for the CPU and vice versa, in an effort to
1467 // reduce this range. This function implements the following procedure:
1468 //
1469 // CPU GPU
1470 //
1471 // Record command buffer
1472 // with timestamp query
1473 //
1474 // Submit command buffer
1475 //
1476 // Post-submission work Begin execution
1477 //
1478 // ???? Set Event GPUReady
1479 //
1480 // Wait on Event GPUReady Wait on Event CPUReady
1481 //
1482 // Get CPU Time Ts Wait on Event CPUReady
1483 //
1484 // Set Event CPUReady Wait on Event CPUReady
1485 //
1486 // Get CPU Time Tcpu Get GPU Time Tgpu
1487 //
1488 // Wait on Event GPUDone Set Event GPUDone
1489 //
1490 // Get CPU Time Te End Execution
1491 //
1492 // Idle Return query results
1493 //
1494 // Get query results
1495 //
1496 // If Te-Ts > epsilon, a GPU or CPU interruption can be assumed and the operation can be
1497 // retried. Once Te-Ts < epsilon, Tcpu can be taken to presumably match Tgpu. Finding an
1498 // epsilon that's valid for all devices may be difficult, so the loop can be performed only a
1499    // limited number of times and the Tcpu,Tgpu pair corresponding to the smallest Te-Ts used for
1500 // calibration.
1501 //
1502 // Note: Once VK_EXT_calibrated_timestamps is ubiquitous, this should be redone.
1503
1504 // Make sure nothing is running
1505 ASSERT(mCommandGraph.empty());
1506
1507 TRACE_EVENT0("gpu.angle", "RendererVk::synchronizeCpuGpuTime");
1508
1509 // Create a query used to receive the GPU timestamp
1510 vk::QueryHelper timestampQuery;
1511 ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &timestampQuery));
1512
1513 // Create the three events
1514 VkEventCreateInfo eventCreateInfo = {};
1515 eventCreateInfo.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
1516 eventCreateInfo.flags = 0;
1517
1518 vk::Scoped<vk::Event> cpuReady(mDevice), gpuReady(mDevice), gpuDone(mDevice);
Yuly Novikov27780292018-11-09 11:19:49 -05001519 ANGLE_VK_TRY(context, cpuReady.get().init(mDevice, eventCreateInfo));
1520 ANGLE_VK_TRY(context, gpuReady.get().init(mDevice, eventCreateInfo));
1521 ANGLE_VK_TRY(context, gpuDone.get().init(mDevice, eventCreateInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001522
1523 constexpr uint32_t kRetries = 10;
1524
1525 // Time suffixes used are S for seconds and Cycles for cycles
1526 double tightestRangeS = 1e6f;
1527 double TcpuS = 0;
1528 uint64_t TgpuCycles = 0;
1529 for (uint32_t i = 0; i < kRetries; ++i)
1530 {
1531 // Reset the events
Yuly Novikov27780292018-11-09 11:19:49 -05001532 ANGLE_VK_TRY(context, cpuReady.get().reset(mDevice));
1533 ANGLE_VK_TRY(context, gpuReady.get().reset(mDevice));
1534 ANGLE_VK_TRY(context, gpuDone.get().reset(mDevice));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001535
1536 // Record the command buffer
1537 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1538 vk::CommandBuffer &commandBuffer = commandBatch.get();
1539
1540 VkCommandBufferAllocateInfo commandBufferInfo = {};
1541 commandBufferInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1542 commandBufferInfo.commandPool = mCommandPool.getHandle();
1543 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1544 commandBufferInfo.commandBufferCount = 1;
1545
Yuly Novikov27780292018-11-09 11:19:49 -05001546 ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001547
1548 VkCommandBufferBeginInfo beginInfo = {};
1549 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1550 beginInfo.flags = 0;
1551 beginInfo.pInheritanceInfo = nullptr;
1552
Yuly Novikov27780292018-11-09 11:19:49 -05001553 ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001554
1555 commandBuffer.setEvent(gpuReady.get(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);
1556 commandBuffer.waitEvents(1, cpuReady.get().ptr(), VK_PIPELINE_STAGE_HOST_BIT,
1557 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, nullptr, 0, nullptr, 0,
1558 nullptr);
1559
1560 commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
1561 timestampQuery.getQuery(), 1);
1562 commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1563 timestampQuery.getQueryPool()->getHandle(),
1564 timestampQuery.getQuery());
1565
1566 commandBuffer.setEvent(gpuDone.get(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);
1567
Yuly Novikov27780292018-11-09 11:19:49 -05001568 ANGLE_VK_TRY(context, commandBuffer.end());
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001569
1570 // Submit the command buffer
1571 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
1572 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
1573 getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);
1574
1575 VkSubmitInfo submitInfo = {};
1576 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1577 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
1578 submitInfo.pWaitSemaphores = waitSemaphores.data();
1579 submitInfo.pWaitDstStageMask = waitStageMasks.data();
1580 submitInfo.commandBufferCount = 1;
1581 submitInfo.pCommandBuffers = commandBuffer.ptr();
1582 submitInfo.signalSemaphoreCount = 0;
1583 submitInfo.pSignalSemaphores = nullptr;
1584
1585 ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBuffer)));
1586
1587 // Wait for GPU to be ready. This is a short busy wait.
Yuly Novikov27780292018-11-09 11:19:49 -05001588 VkResult result = VK_EVENT_RESET;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001589 do
1590 {
Yuly Novikov27780292018-11-09 11:19:49 -05001591 result = gpuReady.get().getStatus(mDevice);
1592 if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
1593 {
1594 ANGLE_VK_TRY(context, result);
1595 }
1596 } while (result == VK_EVENT_RESET);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001597
1598 double TsS = platform->monotonicallyIncreasingTime(platform);
1599
1600 // Tell the GPU to go ahead with the timestamp query.
Yuly Novikov27780292018-11-09 11:19:49 -05001601 ANGLE_VK_TRY(context, cpuReady.get().set(mDevice));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001602 double cpuTimestampS = platform->monotonicallyIncreasingTime(platform);
1603
1604 // Wait for GPU to be done. Another short busy wait.
1605 do
1606 {
Yuly Novikov27780292018-11-09 11:19:49 -05001607 result = gpuDone.get().getStatus(mDevice);
1608 if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
1609 {
1610 ANGLE_VK_TRY(context, result);
1611 }
1612 } while (result == VK_EVENT_RESET);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001613
1614 double TeS = platform->monotonicallyIncreasingTime(platform);
1615
1616 // Get the query results
1617 ANGLE_TRY(finishToSerial(context, getLastSubmittedQueueSerial()));
1618
1619 constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;
1620
1621 uint64_t gpuTimestampCycles = 0;
Yuly Novikov27780292018-11-09 11:19:49 -05001622 ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
1623 mDevice, timestampQuery.getQuery(), 1, sizeof(gpuTimestampCycles),
1624 &gpuTimestampCycles, sizeof(gpuTimestampCycles), queryFlags));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001625
1626 // Use the first timestamp queried as origin.
1627 if (mGpuEventTimestampOrigin == 0)
1628 {
1629 mGpuEventTimestampOrigin = gpuTimestampCycles;
1630 }
1631
1632 // Take these CPU and GPU timestamps if there is better confidence.
1633 double confidenceRangeS = TeS - TsS;
1634 if (confidenceRangeS < tightestRangeS)
1635 {
1636 tightestRangeS = confidenceRangeS;
1637 TcpuS = cpuTimestampS;
1638 TgpuCycles = gpuTimestampCycles;
1639 }
1640 }
1641
1642 mGpuEventQueryPool.freeQuery(context, &timestampQuery);
1643
1644 // timestampPeriod gives nanoseconds/cycle.
1645 double TgpuS = (TgpuCycles - mGpuEventTimestampOrigin) *
1646 static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) /
1647 1'000'000'000.0;
1648
1649 flushGpuEvents(TgpuS, TcpuS);
1650
1651 mGpuClockSync.gpuTimestampS = TgpuS;
1652 mGpuClockSync.cpuTimestampS = TcpuS;
1653
Jamie Madill7c985f52018-11-29 18:16:17 -05001654 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001655}
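
// A minimal sketch, for illustration only, of how the calibration produced above maps a GPU
// timestamp onto the CPU clock. With a single calibration pair the mapping is a constant offset;
// flushGpuEvents() below additionally corrects for drift between two calibrations. For example,
// with Tcpu = 100.000s and Tgpu = 2.500s, a GPU event at 2.510s maps to 100.010s on the CPU
// timeline.
namespace
{
double GpuToCpuTimeSketch(double calibratedCpuTimeS, double calibratedGpuTimeS, double gpuTimeS)
{
    return gpuTimeS + (calibratedCpuTimeS - calibratedGpuTimeS);
}
}  // anonymous namespace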
1656
1657angle::Result RendererVk::traceGpuEventImpl(vk::Context *context,
1658 vk::CommandBuffer *commandBuffer,
1659 char phase,
1660 const char *name)
1661{
1662 ASSERT(mGpuEventsEnabled);
1663
1664 GpuEventQuery event;
1665
1666 event.name = name;
1667 event.phase = phase;
1668 event.serial = mCurrentQueueSerial;
1669
1670 ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &event.queryPoolIndex, &event.queryIndex));
1671
1672 commandBuffer->resetQueryPool(
1673 mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex, 1);
1674 commandBuffer->writeTimestamp(
1675 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1676 mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex);
1677
1678 mInFlightGpuEventQueries.push_back(std::move(event));
1679
Jamie Madill7c985f52018-11-29 18:16:17 -05001680 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001681}
1682
1683angle::Result RendererVk::checkCompletedGpuEvents(vk::Context *context)
1684{
1685 ASSERT(mGpuEventsEnabled);
1686
1687 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1688 ASSERT(platform);
1689
1690 int finishedCount = 0;
1691
1692 for (GpuEventQuery &eventQuery : mInFlightGpuEventQueries)
1693 {
1694 // Only check the timestamp query if the submission has finished.
1695 if (eventQuery.serial > mLastCompletedQueueSerial)
1696 {
1697 break;
1698 }
1699
1700 // See if the results are available.
1701 uint64_t gpuTimestampCycles = 0;
Yuly Novikov27780292018-11-09 11:19:49 -05001702 VkResult result = mGpuEventQueryPool.getQueryPool(eventQuery.queryPoolIndex)
1703 ->getResults(mDevice, eventQuery.queryIndex, 1,
1704 sizeof(gpuTimestampCycles), &gpuTimestampCycles,
1705 sizeof(gpuTimestampCycles), VK_QUERY_RESULT_64_BIT);
1706 if (result == VK_NOT_READY)
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001707 {
1708 break;
1709 }
Yuly Novikov27780292018-11-09 11:19:49 -05001710 ANGLE_VK_TRY(context, result);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001711
1712 mGpuEventQueryPool.freeQuery(context, eventQuery.queryPoolIndex, eventQuery.queryIndex);
1713
1714 GpuEvent event;
1715 event.gpuTimestampCycles = gpuTimestampCycles;
1716 event.name = eventQuery.name;
1717 event.phase = eventQuery.phase;
1718
1719 mGpuEvents.emplace_back(event);
1720
1721 ++finishedCount;
1722 }
1723
1724 mInFlightGpuEventQueries.erase(mInFlightGpuEventQueries.begin(),
1725 mInFlightGpuEventQueries.begin() + finishedCount);
1726
Jamie Madill7c985f52018-11-29 18:16:17 -05001727 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001728}
1729
1730void RendererVk::flushGpuEvents(double nextSyncGpuTimestampS, double nextSyncCpuTimestampS)
1731{
1732 if (mGpuEvents.size() == 0)
1733 {
1734 return;
1735 }
1736
1737 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1738 ASSERT(platform);
1739
1740 // Find the slope of the clock drift for adjustment
1741 double lastGpuSyncTimeS = mGpuClockSync.gpuTimestampS;
1742 double lastGpuSyncDiffS = mGpuClockSync.cpuTimestampS - mGpuClockSync.gpuTimestampS;
1743 double gpuSyncDriftSlope = 0;
1744
1745 double nextGpuSyncTimeS = nextSyncGpuTimestampS;
1746 double nextGpuSyncDiffS = nextSyncCpuTimestampS - nextSyncGpuTimestampS;
1747
1748 // No gpu trace events should have been generated before the clock sync, so if there is no
1749 // "previous" clock sync, there should be no gpu events (i.e. the function early-outs above).
1750 ASSERT(mGpuClockSync.gpuTimestampS != std::numeric_limits<double>::max() &&
1751 mGpuClockSync.cpuTimestampS != std::numeric_limits<double>::max());
1752
1753 gpuSyncDriftSlope =
1754 (nextGpuSyncDiffS - lastGpuSyncDiffS) / (nextGpuSyncTimeS - lastGpuSyncTimeS);
1755
1756 for (const GpuEvent &event : mGpuEvents)
1757 {
1758 double gpuTimestampS =
1759 (event.gpuTimestampCycles - mGpuEventTimestampOrigin) *
1760 static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) * 1e-9;
1761
1762 // Account for clock drift.
1763 gpuTimestampS += lastGpuSyncDiffS + gpuSyncDriftSlope * (gpuTimestampS - lastGpuSyncTimeS);
1764
1765 // Generate the trace now that the GPU timestamp is available and clock drifts are accounted
1766 // for.
1767 static long long eventId = 1;
1768 static const unsigned char *categoryEnabled =
1769 TRACE_EVENT_API_GET_CATEGORY_ENABLED("gpu.angle.gpu");
1770 platform->addTraceEvent(platform, event.phase, categoryEnabled, event.name, eventId++,
1771 gpuTimestampS, 0, nullptr, nullptr, nullptr, TRACE_EVENT_FLAG_NONE);
1772 }
1773
1774 mGpuEvents.clear();
1775}
1776
Shahbaz Youssefi96bd8fd2018-11-30 14:30:18 -05001777template <VkFormatFeatureFlags VkFormatProperties::*features>
1778bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
1779{
1780 ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
1781 VkFormatProperties &deviceProperties = mFormatProperties[format];
1782
1783 if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
1784 {
1785 // If we don't have the actual device features, see if the requested features are mandatory.
1786 // If so, there's no need to query the device.
1787 const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
1788 if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
1789 {
1790 return true;
1791 }
1792
1793 // Otherwise query the format features and cache it.
1794 vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
1795 }
1796
1797 return IsMaskFlagSet(deviceProperties.*features, featureBits);
1798}
1799
Jamie Madillaaca96e2018-06-12 10:19:48 -04001800uint32_t GetUniformBufferDescriptorCount()
1801{
1802 return kUniformBufferDescriptorsPerDescriptorSet;
1803}
1804
Jamie Madill9e54b5a2016-05-25 12:57:39 -04001805} // namespace rx