//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/platform.h"
#include "common/system_utils.h"
#include "libANGLE/Context.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "platform/Platform.h"
#include "third_party/trace_event/trace_event.h"

// Consts
namespace
{
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
constexpr size_t kInFlightCommandsLimit = 100u;
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
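// Note: kInvalidFormatFeatureFlags (all bits set) is used as a sentinel: the RendererVk
// constructor below fills mFormatProperties with it so uncached entries are recognizable
// before the real VkFormatProperties have been queried from the driver.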
}  // anonymous namespace

namespace rx
{

namespace
{
// We currently allocate only two uniform buffers per descriptor set: one for the fragment shader
// and one for the vertex shader.
constexpr size_t kUniformBufferDescriptorsPerDescriptorSet = 2;
// Update the pipeline cache every this many swaps (at 60fps, this means every 10 minutes).
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 10 * 60 * 60;
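// (10 minutes * 60 seconds/minute * 60 swaps/second = 36000 swaps between cache updates.)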
// Wait a maximum of 10s. If that times out, we declare it a failure.
constexpr uint64_t kMaxFenceWaitTimeNs = 10'000'000'000llu;
// Per the Vulkan specification, as long as Vulkan 1.1+ is returned by vkEnumerateInstanceVersion,
// ANGLE must indicate the highest version of Vulkan functionality that it uses. The Vulkan
// validation layers will issue messages for any core functionality that requires a higher version.
// This value must be increased whenever ANGLE starts using functionality from a newer core
// version of Vulkan.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;

bool ShouldEnableMockICD(const egl::AttributeMap &attribs)
{
#if !defined(ANGLE_PLATFORM_ANDROID)
    // Mock ICD does not currently run on Android.
    return (attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
                        EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE) ==
            EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE);
#else
    return false;
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
}
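
// Illustrative usage (hypothetical snippet, not part of this file): an application opts into the
// mock ICD through EGL display attributes, roughly:
//   EGLint dispAttribs[] = {EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
//                           EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE, EGL_NONE};
//   eglGetPlatformDisplayEXT(EGL_PLATFORM_ANGLE_ANGLE, nativeDisplay, dispAttribs);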
76
Frank Henigman52047de2018-11-13 17:22:36 -050077bool StrLess(const char *a, const char *b)
Jamie Madille09bd5d2016-11-29 16:20:35 -050078{
Frank Henigman52047de2018-11-13 17:22:36 -050079 return strcmp(a, b) < 0;
Jamie Madille09bd5d2016-11-29 16:20:35 -050080}
81
Frank Henigman52047de2018-11-13 17:22:36 -050082VkResult VerifyExtensionsPresent(const RendererVk::ExtensionNameList &haystack,
83 const RendererVk::ExtensionNameList &needles)
Tobin Ehlis3a181e32018-08-29 15:17:05 -060084{
Frank Henigman52047de2018-11-13 17:22:36 -050085 // NOTE: The lists must be sorted.
86 return std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess)
87 ? VK_SUCCESS
88 : VK_ERROR_EXTENSION_NOT_PRESENT;
89}
90
91bool ExtensionFound(const char *needle, const RendererVk::ExtensionNameList &haystack)
92{
93 // NOTE: The list must be sorted.
94 return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
Tobin Ehlis3a181e32018-08-29 15:17:05 -060095}
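
// Note: both helpers rely on their inputs being sorted with the same comparator (StrLess), which
// is why the callers below std::sort() the extension name lists with StrLess before querying them.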

// Array of validation error/warning messages that will be ignored; each entry should reference
// the bug that tracks it.
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/2796
    "UNASSIGNED-CoreValidation-Shader-PointSizeMissing",
};

// Suppress validation errors that are known: return "true" if the given message is in the list
// of known messages to ignore, else return "false".
bool IsIgnoredDebugMessage(const char *message)
{
    if (!message)
    {
        return false;
    }
    for (const char *msg : kSkippedMessages)
    {
        if (strstr(message, msg) != nullptr)
        {
            return true;
        }
    }
    return false;
}
123const char *GetVkObjectTypeName(VkObjectType type)
124{
125 switch (type)
126 {
127 case VK_OBJECT_TYPE_UNKNOWN:
128 return "Unknown";
129 case VK_OBJECT_TYPE_INSTANCE:
130 return "Instance";
131 case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
132 return "Physical Device";
133 case VK_OBJECT_TYPE_DEVICE:
134 return "Device";
135 case VK_OBJECT_TYPE_QUEUE:
136 return "Queue";
137 case VK_OBJECT_TYPE_SEMAPHORE:
138 return "Semaphore";
139 case VK_OBJECT_TYPE_COMMAND_BUFFER:
140 return "Command Buffer";
141 case VK_OBJECT_TYPE_FENCE:
142 return "Fence";
143 case VK_OBJECT_TYPE_DEVICE_MEMORY:
144 return "Device Memory";
145 case VK_OBJECT_TYPE_BUFFER:
146 return "Buffer";
147 case VK_OBJECT_TYPE_IMAGE:
148 return "Image";
149 case VK_OBJECT_TYPE_EVENT:
150 return "Event";
151 case VK_OBJECT_TYPE_QUERY_POOL:
152 return "Query Pool";
153 case VK_OBJECT_TYPE_BUFFER_VIEW:
154 return "Buffer View";
155 case VK_OBJECT_TYPE_IMAGE_VIEW:
156 return "Image View";
157 case VK_OBJECT_TYPE_SHADER_MODULE:
158 return "Shader Module";
159 case VK_OBJECT_TYPE_PIPELINE_CACHE:
160 return "Pipeline Cache";
161 case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
162 return "Pipeline Layout";
163 case VK_OBJECT_TYPE_RENDER_PASS:
164 return "Render Pass";
165 case VK_OBJECT_TYPE_PIPELINE:
166 return "Pipeline";
167 case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
168 return "Descriptor Set Layout";
169 case VK_OBJECT_TYPE_SAMPLER:
170 return "Sampler";
171 case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
172 return "Descriptor Pool";
173 case VK_OBJECT_TYPE_DESCRIPTOR_SET:
174 return "Descriptor Set";
175 case VK_OBJECT_TYPE_FRAMEBUFFER:
176 return "Framebuffer";
177 case VK_OBJECT_TYPE_COMMAND_POOL:
178 return "Command Pool";
179 case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
180 return "Sampler YCbCr Conversion";
181 case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
182 return "Descriptor Update Template";
183 case VK_OBJECT_TYPE_SURFACE_KHR:
184 return "Surface";
185 case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
186 return "Swapchain";
187 case VK_OBJECT_TYPE_DISPLAY_KHR:
188 return "Display";
189 case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
190 return "Display Mode";
191 case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
192 return "Debug Report Callback";
193 case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
194 return "Object Table";
195 case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
196 return "Indirect Commands Layout";
197 case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
198 return "Debug Utils Messenger";
199 case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
200 return "Validation Cache";
Shahbaz Youssefi38051222019-02-13 21:21:13 +0000201 case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NVX:
Shahbaz Youssefi5bca4fe2019-01-09 17:07:06 -0500202 return "Acceleration Structure";
203 default:
204 return "<Unrecognized>";
205 }
206}

VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
        VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
        VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

    // Check if we even care about this message.
    if ((messageSeverity & kSeveritiesToLog) == 0)
    {
        return VK_FALSE;
    }

    // See if it's an issue we are aware of and don't want to be spammed about.
    if (IsIgnoredDebugMessage(callbackData->pMessageIdName))
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occurred.
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle  = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError    = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    return VK_FALSE;
}
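
// Illustrative shape of a message formatted by the callback above (hypothetical values, not
// captured from a real run; indentation is approximate):
//   [ UNASSIGNED-CoreValidation-Shader-OutputNotConsumed ] fragment shader writes to output
//   location 1 with no matching attachment
//                               <Command Buffer Label Hierarchy:>
//                                Frame 42
//                               Object: 0x3f (type = Image(10)) [BackbufferImage]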

VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if (IsIgnoredDebugMessage(message))
    {
        return VK_FALSE;
    }
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}
334
Yuly Novikov199f4292018-01-19 19:04:05 -0500335// If we're loading the validation layers, we could be running from any random directory.
336// Change to the executable directory so we can find the layers, then change back to the
337// previous directory to be safe we don't disrupt the application.
338class ScopedVkLoaderEnvironment : angle::NonCopyable
339{
340 public:
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600341 ScopedVkLoaderEnvironment(bool enableValidationLayers, bool enableMockICD)
342 : mEnableValidationLayers(enableValidationLayers),
343 mEnableMockICD(enableMockICD),
344 mChangedCWD(false),
345 mChangedICDPath(false)
Yuly Novikov199f4292018-01-19 19:04:05 -0500346 {
347// Changing CWD and setting environment variables makes no sense on Android,
348// since this code is a part of Java application there.
349// Android Vulkan loader doesn't need this either.
Michael Spang229fc832019-01-21 18:09:15 -0500350#if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA)
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600351 if (enableMockICD)
352 {
353 // Override environment variable to use built Mock ICD
354 // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
355 mPreviousICDPath = angle::GetEnvironmentVar(g_VkICDPathEnv);
356 mChangedICDPath = angle::SetEnvironmentVar(g_VkICDPathEnv, ANGLE_VK_ICD_JSON);
357 if (!mChangedICDPath)
358 {
359 ERR() << "Error setting Path for Mock/Null Driver.";
360 mEnableMockICD = false;
361 }
362 }
Jamie Madill46848422018-08-09 10:46:06 -0400363 if (mEnableValidationLayers || mEnableMockICD)
Yuly Novikov199f4292018-01-19 19:04:05 -0500364 {
365 const auto &cwd = angle::GetCWD();
366 if (!cwd.valid())
367 {
368 ERR() << "Error getting CWD for Vulkan layers init.";
369 mEnableValidationLayers = false;
Jamie Madill46848422018-08-09 10:46:06 -0400370 mEnableMockICD = false;
Yuly Novikov199f4292018-01-19 19:04:05 -0500371 }
372 else
373 {
374 mPreviousCWD = cwd.value();
Jamie Madillbab03022019-01-16 14:12:28 -0500375 std::string exeDir = angle::GetExecutableDirectory();
376 mChangedCWD = angle::SetCWD(exeDir.c_str());
Yuly Novikov199f4292018-01-19 19:04:05 -0500377 if (!mChangedCWD)
378 {
379 ERR() << "Error setting CWD for Vulkan layers init.";
380 mEnableValidationLayers = false;
Jamie Madill46848422018-08-09 10:46:06 -0400381 mEnableMockICD = false;
Yuly Novikov199f4292018-01-19 19:04:05 -0500382 }
383 }
384 }
385
386 // Override environment variable to use the ANGLE layers.
387 if (mEnableValidationLayers)
388 {
Tobin Ehlisa3b220f2018-03-06 16:22:13 -0700389 if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_DATA_DIR))
Yuly Novikov199f4292018-01-19 19:04:05 -0500390 {
391 ERR() << "Error setting environment for Vulkan layers init.";
392 mEnableValidationLayers = false;
393 }
394 }
395#endif // !defined(ANGLE_PLATFORM_ANDROID)
396 }
397
398 ~ScopedVkLoaderEnvironment()
399 {
400 if (mChangedCWD)
401 {
402#if !defined(ANGLE_PLATFORM_ANDROID)
403 ASSERT(mPreviousCWD.valid());
404 angle::SetCWD(mPreviousCWD.value().c_str());
405#endif // !defined(ANGLE_PLATFORM_ANDROID)
406 }
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600407 if (mChangedICDPath)
408 {
Omar El Sheikh80d4ef12018-07-13 17:08:19 -0600409 if (mPreviousICDPath.value().empty())
410 {
411 angle::UnsetEnvironmentVar(g_VkICDPathEnv);
412 }
413 else
414 {
415 angle::SetEnvironmentVar(g_VkICDPathEnv, mPreviousICDPath.value().c_str());
416 }
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600417 }
Yuly Novikov199f4292018-01-19 19:04:05 -0500418 }
419
Jamie Madillaaca96e2018-06-12 10:19:48 -0400420 bool canEnableValidationLayers() const { return mEnableValidationLayers; }
Yuly Novikov199f4292018-01-19 19:04:05 -0500421
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600422 bool canEnableMockICD() const { return mEnableMockICD; }
423
Yuly Novikov199f4292018-01-19 19:04:05 -0500424 private:
425 bool mEnableValidationLayers;
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600426 bool mEnableMockICD;
Yuly Novikov199f4292018-01-19 19:04:05 -0500427 bool mChangedCWD;
428 Optional<std::string> mPreviousCWD;
Omar El Sheikh26c61b22018-06-29 12:50:59 -0600429 bool mChangedICDPath;
430 Optional<std::string> mPreviousICDPath;
Yuly Novikov199f4292018-01-19 19:04:05 -0500431};
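
// Usage note: RendererVk::initialize() below constructs a ScopedVkLoaderEnvironment on the stack
// while it enumerates layers and creates the VkInstance; RAII guarantees the CWD and environment
// variables are restored when the object goes out of scope.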

void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
                          bool preferMockICD,
                          VkPhysicalDevice *physicalDeviceOut,
                          VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
{
    ASSERT(!physicalDevices.empty());
    if (preferMockICD)
    {
        for (const VkPhysicalDevice &physicalDevice : physicalDevices)
        {
            vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
            if ((kMockVendorID == physicalDevicePropertiesOut->vendorID) &&
                (kMockDeviceID == physicalDevicePropertiesOut->deviceID) &&
                (strcmp(kMockDeviceName, physicalDevicePropertiesOut->deviceName) == 0))
            {
                *physicalDeviceOut = physicalDevice;
                return;
            }
        }
        WARN() << "Vulkan Mock Driver was requested but Mock Device was not found. Using default "
                  "physicalDevice instead.";
    }

    // Fall back to the first device.
    *physicalDeviceOut = physicalDevices[0];
    vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
}

// Initially dumping the command graphs is disabled.
constexpr bool kEnableCommandGraphDiagnostics = false;

}  // anonymous namespace

// CommandBatch implementation.
RendererVk::CommandBatch::CommandBatch() = default;

RendererVk::CommandBatch::~CommandBatch() = default;

RendererVk::CommandBatch::CommandBatch(CommandBatch &&other)
    : commandPool(std::move(other.commandPool)), fence(std::move(other.fence)), serial(other.serial)
{}

RendererVk::CommandBatch &RendererVk::CommandBatch::operator=(CommandBatch &&other)
{
    std::swap(commandPool, other.commandPool);
    std::swap(fence, other.fence);
    std::swap(serial, other.serial);
    return *this;
}

void RendererVk::CommandBatch::destroy(VkDevice device)
{
    commandPool.destroy(device);
    fence.destroy(device);
}
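
// Note: a CommandBatch owns the command pool and fence for one submission's commands; the move
// operations above swap ownership so batches can be queued in mInFlightCommands without
// double-destroying Vulkan handles. destroy() is expected to run only once the batch's fence has
// signaled (or the renderer is torn down).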

// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mFeaturesInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableMockICD(false),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribDivisor(1),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mCommandGraph(kEnableCommandGraphDiagnostics),
      mGpuEventsEnabled(false),
      mGpuClockSync{std::numeric_limits<double>::max(), std::numeric_limits<double>::max()},
      mGpuEventTimestampOrigin(0)
{
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);
}

RendererVk::~RendererVk() {}

void RendererVk::onDestroy(vk::Context *context)
{
    if (!mInFlightCommands.empty() || !mGarbage.empty())
    {
        // TODO(jmadill): Not nice to pass nullptr here, but shouldn't be a problem.
        (void)finish(context);
    }

    mUtils.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mRenderPassCache.destroy(mDevice);
    mPipelineCache.destroy(mDevice);
    mSubmitSemaphorePool.destroy(mDevice);
    mShaderLibrary.destroy(mDevice);
    mGpuEventQueryPool.destroy(mDevice);

    GlslangWrapper::Release();

    if (mCommandPool.valid())
    {
        mCommandPool.destroy(mDevice);
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugUtilsMessenger)
    {
        ASSERT(mInstance && vkDestroyDebugUtilsMessengerEXT);
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);

        ASSERT(mDebugReportCallback == VK_NULL_HANDLE);
    }
    else if (mDebugReportCallback)
    {
        ASSERT(mInstance && vkDestroyDebugReportCallbackEXT);
        vkDestroyDebugReportCallbackEXT(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}

void RendererVk::notifyDeviceLost()
{
    mDeviceLost = true;

    mCommandGraph.clear();
    nextSerial();
    freeAllInFlightResources();

    mDisplay->notifyDeviceLost();
}

bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}

angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiExtension,
                                     const char *wsiLayer)
{
    mDisplay                         = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs),
                                                ShouldEnableMockICD(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnableMockICD          = scopedEnvironment.canEnableMockICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // Enumerate instance extensions that are provided by the Vulkan
    // implementation and implicit layers.
    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount, nullptr));
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount,
                                    instanceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiExtension);

    bool enableDebugUtils =
        mEnableValidationLayers &&
        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);
    bool enableDebugReport =
        mEnableValidationLayers && !enableDebugUtils &&
        ExtensionFound(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instanceExtensionNames);

    if (enableDebugUtils)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    else if (enableDebugReport)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    std::sort(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionNames, enabledInstanceExtensions));

    // Enable VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME if available.
    if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                       instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    VkApplicationInfo applicationInfo  = {};
    applicationInfo.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName   = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName        = "ANGLE";
    applicationInfo.engineVersion      = 1;

    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
    if (!enumerateInstanceVersion)
    {
        applicationInfo.apiVersion = VK_API_VERSION_1_0;
    }
    else
    {
        uint32_t apiVersion = VK_API_VERSION_1_0;
        ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
        if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.
            applicationInfo.apiVersion = kPreferredVulkanAPIVersion;
        }
        else
        {
            // Since only 1.0 instance-level functionality is available, this must be set to 1.0.
            applicationInfo.apiVersion = VK_API_VERSION_1_0;
        }
    }

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags                = 0;
    instanceInfo.pApplicationInfo     = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount   = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();

    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));

    if (enableDebugUtils)
    {
        // Try to use the newer EXT_debug_utils if it exists.
        InitDebugUtilsEXTFunctions(mInstance);

        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        messengerInfo.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
                                        VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
        messengerInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                                    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                                    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData       = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                               &mDebugUtilsMessenger));
    }
    else if (enableDebugReport)
    {
        // Fallback to EXT_debug_report.
        InitDebugReportEXTFunctions(mInstance);

        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData   = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugReportCallbackEXT(mInstance, &debugReportInfo, nullptr,
                                                               &mDebugReportCallback));
    }

    if (std::find(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(),
                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
        enabledInstanceExtensions.end())
    {
        InitGetPhysicalDeviceProperties2KHRFunctions(mInstance);
        ASSERT(vkGetPhysicalDeviceProperties2KHR);
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnableMockICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount   = 0;
    uint32_t firstGraphicsQueueFamily = 0;
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            break;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    GlslangWrapper::Initialize();

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    return angle::Result::Continue;
}
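
// Initialization overview (summary of the code above): layers are discovered under the scoped
// loader environment, the instance is created with the WSI and debug extensions, a physical
// device is chosen (optionally the mock ICD), and the logical device is created immediately when
// exactly one graphics+compute queue family exists; otherwise device creation waits for the first
// WindowSurface so the present-capable family can be selected.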

angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    VulkanLayerVector enabledDeviceLayerNames;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers =
            GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
    }

    const char *wsiLayer = displayVk->getWSILayer();
    if (wsiLayer)
    {
        enabledDeviceLayerNames.push_back(wsiLayer);
    }

    // Enumerate device extensions that are provided by the Vulkan
    // implementation and implicit layers.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    // Enumerate device extensions that are provided by explicit layers.
    for (const char *layerName : enabledDeviceLayerNames)
    {
        uint32_t previousExtensionCount    = static_cast<uint32_t>(deviceExtensionProps.size());
        uint32_t deviceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
                                                          &deviceLayerExtensionCount, nullptr));
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
                                    mPhysicalDevice, layerName, &deviceLayerExtensionCount,
                                    deviceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList deviceExtensionNames;
    if (!deviceExtensionProps.empty())
    {
        ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
        for (const VkExtensionProperties &prop : deviceExtensionProps)
        {
            deviceExtensionNames.push_back(prop.extensionName);
        }
        std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    initFeatures(deviceExtensionNames);
    mFeaturesInitialized = true;

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if ((getFeatures().flipViewportY) &&
        (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }
    if (getFeatures().supportsIncrementalPresent)
    {
        enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    if (getFeatures().supportsAndroidHardwareBuffer)
    {
        enabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
        enabledDeviceExtensions.push_back(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
        InitExternalMemoryHardwareBufferANDROIDFunctions(mInstance);
    }
#else
    ASSERT(!getFeatures().supportsAndroidHardwareBuffer);
#endif

    std::sort(enabledDeviceExtensions.begin(), enabledDeviceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionNames, enabledDeviceExtensions));

    // Select additional features to be enabled.
    VkPhysicalDeviceFeatures2KHR enabledFeatures = {};
    enabledFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    enabledFeatures.features.inheritedQueries    = mPhysicalDeviceFeatures.inheritedQueries;
    enabledFeatures.features.robustBufferAccess  = mPhysicalDeviceFeatures.robustBufferAccess;
    enabledFeatures.features.samplerAnisotropy   = mPhysicalDeviceFeatures.samplerAnisotropy;

    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT divisorFeatures = {};
    divisorFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
    divisorFeatures.vertexAttributeInstanceRateDivisor = true;

    float zeroPriority                      = 0.0f;
    VkDeviceQueueCreateInfo queueCreateInfo = {};
    queueCreateInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags                   = 0;
    queueCreateInfo.queueFamilyIndex        = queueFamilyIndex;
    queueCreateInfo.queueCount              = 1;
    queueCreateInfo.pQueuePriorities        = &zeroPriority;

    // Initialize the device.
    VkDeviceCreateInfo createInfo = {};

    createInfo.sType                = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags                = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueCreateInfo;
    createInfo.enabledLayerCount    = static_cast<uint32_t>(enabledDeviceLayerNames.size());
    createInfo.ppEnabledLayerNames  = enabledDeviceLayerNames.data();

    if (vkGetPhysicalDeviceProperties2KHR &&
        ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
    {
        enabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
        enabledFeatures.pNext = &divisorFeatures;

        VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT divisorProperties = {};
        divisorProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;

        VkPhysicalDeviceProperties2 deviceProperties = {};
        deviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        deviceProperties.pNext = &divisorProperties;

        vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);
        mMaxVertexAttribDivisor = divisorProperties.maxVertexAttribDivisor;

        createInfo.pNext = &enabledFeatures;
    }
    else
    {
        createInfo.pEnabledFeatures = &enabledFeatures.features;
    }

    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the command pool now that we know the queue family index.
    VkCommandPoolCreateInfo commandPoolInfo = {};
    commandPoolInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    commandPoolInfo.flags                   = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    commandPoolInfo.queueFamilyIndex        = mCurrentQueueFamilyIndex;

    ANGLE_VK_TRY(displayVk, mCommandPool.init(mDevice, commandPoolInfo));

    // Initialize the Vulkan pipeline cache.
    ANGLE_TRY(initPipelineCache(displayVk));

    // Initialize the submission semaphore pool.
    ANGLE_TRY(mSubmitSemaphorePool.init(displayVk, vk::kDefaultSemaphorePoolSize));

#if ANGLE_ENABLE_VULKAN_GPU_TRACE_EVENTS
    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    ASSERT(platform);

    // GPU tracing workaround for anglebug.com/2927. The renderer should not emit gpu events
    // during platform discovery.
    const unsigned char *gpuEventsEnabled =
        platform->getTraceCategoryEnabledFlag(platform, "gpu.angle.gpu");
    mGpuEventsEnabled = gpuEventsEnabled && *gpuEventsEnabled;
#endif

    if (mGpuEventsEnabled)
    {
        // Calculate the difference between CPU and GPU clocks for GPU event reporting.
        ANGLE_TRY(mGpuEventQueryPool.init(displayVk, VK_QUERY_TYPE_TIMESTAMP,
                                          vk::kDefaultTimestampQueryPoolSize));
        ANGLE_TRY(synchronizeCpuGpuTime(displayVk));
    }

    return angle::Result::Continue;
}

angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue;
        }
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue;
}

std::string RendererVk::getVendorString() const
{
    return GetVendorString(mPhysicalDeviceProperties.vendorID);
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(";

    // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so
    // that Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not
    // be needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
    // driver detection.
    if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
    {
        strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
    }

    strstr << mPhysicalDeviceProperties.deviceName;
    strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";

    strstr << ")";

    return strstr.str();
}

gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Current highest supported version.
    gl::Version maxVersion = gl::Version(3, 0);

#if ANGLE_VULKAN_CONFORMANT_CONFIGS_ONLY
    // TODO: Disallow ES 3.0 until supported. http://crbug.com/angleproject/2950
    maxVersion = gl::Version(2, 0);
#endif

    // Vulkan inherited queries are required to support any GL query type beyond ES 2.0, so cap
    // the version at 2.0 when they are unavailable. (std::min, not std::max: max would never
    // lower the version and the check would be a no-op.)
    if (!mPhysicalDeviceFeatures.inheritedQueries)
    {
        maxVersion = std::min(maxVersion, gl::Version(2, 0));
    }

    return maxVersion;
}
1144
Frank Henigman52047de2018-11-13 17:22:36 -05001145void RendererVk::initFeatures(const ExtensionNameList &deviceExtensionNames)
Jamie Madill12222072018-07-11 14:59:48 -04001146{
Jamie Madillb36a4812018-09-25 10:15:11 -04001147// Use OpenGL line rasterization rules by default.
1148// TODO(jmadill): Fix Android support. http://anglebug.com/2830
1149#if defined(ANGLE_PLATFORM_ANDROID)
1150 mFeatures.basicGLLineRasterization = false;
1151#else
Jamie Madill12222072018-07-11 14:59:48 -04001152 mFeatures.basicGLLineRasterization = true;
Jamie Madillb36a4812018-09-25 10:15:11 -04001153#endif // defined(ANGLE_PLATFORM_ANDROID)
Jamie Madill12222072018-07-11 14:59:48 -04001154
Ian Elliott52f5da42018-12-21 09:02:09 -07001155 if ((mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
Frank Henigman52047de2018-11-13 17:22:36 -05001156 ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames))
Ian Elliottd50521f2018-12-20 12:05:14 -07001157 {
1158        // TODO(lucferron): Currently disabled only on Intel since many tests are failing and need
1159 // investigation. http://anglebug.com/2728
1160 mFeatures.flipViewportY = !IsIntel(mPhysicalDeviceProperties.vendorID);
1161 }
Frank Henigmanbeb669d2018-09-21 16:25:52 -04001162
1163#ifdef ANGLE_PLATFORM_WINDOWS
1164 // http://anglebug.com/2838
1165 mFeatures.extraCopyBufferRegion = IsIntel(mPhysicalDeviceProperties.vendorID);
Shahbaz Youssefi4f3b2072019-01-01 14:48:25 -05001166
1167 // http://anglebug.com/3055
1168 mFeatures.forceCpuPathForCubeMapCopy = IsIntel(mPhysicalDeviceProperties.vendorID);
Frank Henigmanbeb669d2018-09-21 16:25:52 -04001169#endif
Shahbaz Youssefid856ca42018-10-31 16:55:12 -04001170
1171 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1172 platform->overrideFeaturesVk(platform, &mFeatures);
Jamie Madillfde74c02018-11-18 16:12:02 -05001173
1174 // Work around incorrect NVIDIA point size range clamping.
1175 // TODO(jmadill): Narrow driver range once fixed. http://anglebug.com/2970
1176 if (IsNvidia(mPhysicalDeviceProperties.vendorID))
1177 {
1178 mFeatures.clampPointSize = true;
1179 }
Shahbaz Youssefi611bbaa2018-12-06 01:59:53 +01001180
Jamie Madillfa7ca182019-01-15 11:20:58 -05001181 // We also need to clamp point size on several Android drivers.
1182 // TODO(jmadill): Remove suppression once fixed. http://anglebug.com/2599
1183 if (IsAndroid())
1184 {
1185 mFeatures.clampPointSize = true;
1186 }
1187
Shahbaz Youssefi611bbaa2018-12-06 01:59:53 +01001188#if defined(ANGLE_PLATFORM_ANDROID)
Shahbaz Youssefib08457d2018-12-11 15:13:54 -05001189 // Work around ineffective compute-graphics barriers on Nexus 5X.
1190 // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
1191 // http://anglebug.com/3019
1192 mFeatures.flushAfterVertexConversion =
1193 IsNexus5X(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
Shahbaz Youssefi611bbaa2018-12-06 01:59:53 +01001194#endif
Ian Elliottbcb78902018-12-19 11:46:29 -07001195
Frank Henigman52047de2018-11-13 17:22:36 -05001196 if (ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames))
Ian Elliottbcb78902018-12-19 11:46:29 -07001197 {
1198 mFeatures.supportsIncrementalPresent = true;
1199 }
Geoff Lang009696c2019-01-31 14:47:07 -05001200
1201#if defined(ANGLE_PLATFORM_ANDROID)
1202 mFeatures.supportsAndroidHardwareBuffer = ExtensionFound(
1203 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, deviceExtensionNames);
1204#endif
Jamie Madill12222072018-07-11 14:59:48 -04001205}
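
// A minimal sketch of an embedder's override hook, which runs via overrideFeaturesVk() above
// after the defaults are chosen. The entry point is real; the hook body and the assumption that
// mFeatures is an angle::FeaturesVk are illustrative:
//
//     void MyOverrideFeaturesVk(angle::PlatformMethods *platform, angle::FeaturesVk *features)
//     {
//         // Force a workaround on, e.g. to reproduce a driver bug on unaffected hardware.
//         features->clampPointSize = true;
//     }
//
// The hook is installed on the PlatformMethods struct before display initialization.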
1206
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001207void RendererVk::initPipelineCacheVkKey()
1208{
1209 std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
1210 // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
1211 // cache. It's not particularly necessary to write it as a hex number as done here, so long as
1212 // there is no '\0' in the result.
1213 for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
1214 {
1215 hashStream << std::hex << c;
1216 }
1217 // Add the vendor and device id too for good measure.
1218 hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
1219 hashStream << std::hex << mPhysicalDeviceProperties.deviceID;
1220
1221 const std::string &hashString = hashStream.str();
1222 angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
1223 hashString.length(), mPipelineCacheVkBlobKey.data());
1224}
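
// For illustration, the (unhashed) key string built above has the shape
//
//     "ANGLE Pipeline Cache: <pipelineCacheUUID bytes in hex><vendorID hex><deviceID hex>"
//
// which SHA1 condenses into the fixed-size mPipelineCacheVkBlobKey; any encoding without
// embedded '\0' bytes would work equally well.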
1225
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001226angle::Result RendererVk::initPipelineCache(DisplayVk *display)
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001227{
1228 initPipelineCacheVkKey();
1229
1230 egl::BlobCache::Value initialData;
1231 bool success = display->getBlobCache()->get(display->getScratchBuffer(),
1232 mPipelineCacheVkBlobKey, &initialData);
1233
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001234 VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001235
1236 pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001237 pipelineCacheCreateInfo.flags = 0;
1238 pipelineCacheCreateInfo.initialDataSize = success ? initialData.size() : 0;
1239 pipelineCacheCreateInfo.pInitialData = success ? initialData.data() : nullptr;
1240
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001241 ANGLE_VK_TRY(display, mPipelineCache.init(mDevice, pipelineCacheCreateInfo));
Jamie Madill7c985f52018-11-29 18:16:17 -05001242 return angle::Result::Continue;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001243}
1244
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001245void RendererVk::getSubmitWaitSemaphores(
1246 vk::Context *context,
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001247 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> *waitSemaphores,
1248 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> *waitStageMasks)
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001249{
1250 if (mSubmitLastSignaledSemaphore.getSemaphore())
1251 {
1252 waitSemaphores->push_back(mSubmitLastSignaledSemaphore.getSemaphore()->getHandle());
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001253 waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001254
1255 // Return the semaphore to the pool (which will remain valid and unused until the
1256 // queue it's about to be waited on has finished execution).
1257 mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);
1258 }
1259
1260 for (vk::SemaphoreHelper &semaphore : mSubmitWaitSemaphores)
1261 {
1262 waitSemaphores->push_back(semaphore.getSemaphore()->getHandle());
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001263 waitStageMasks->push_back(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
1264
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001265 mSubmitSemaphorePool.freeSemaphore(context, &semaphore);
1266 }
1267 mSubmitWaitSemaphores.clear();
1268}
1269
Jamie Madillacccc6c2016-05-03 17:22:10 -04001270const gl::Caps &RendererVk::getNativeCaps() const
1271{
1272 ensureCapsInitialized();
1273 return mNativeCaps;
1274}
1275
1276const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
1277{
1278 ensureCapsInitialized();
1279 return mNativeTextureCaps;
1280}
1281
1282const gl::Extensions &RendererVk::getNativeExtensions() const
1283{
1284 ensureCapsInitialized();
1285 return mNativeExtensions;
1286}
1287
1288const gl::Limitations &RendererVk::getNativeLimitations() const
1289{
1290 ensureCapsInitialized();
1291 return mNativeLimitations;
1292}
1293
Luc Ferrondaedf4d2018-03-16 09:28:53 -04001294uint32_t RendererVk::getMaxActiveTextures()
1295{
1296 // TODO(lucferron): expose this limitation to GL in Context Caps
1297 return std::min<uint32_t>(mPhysicalDeviceProperties.limits.maxPerStageDescriptorSamplers,
1298 gl::IMPLEMENTATION_MAX_ACTIVE_TEXTURES);
1299}
1300
Jamie Madill49ac74b2017-12-21 14:42:33 -05001301const vk::CommandPool &RendererVk::getCommandPool() const
Jamie Madill4d0bf552016-12-28 15:45:24 -05001302{
Jamie Madill49ac74b2017-12-21 14:42:33 -05001303 return mCommandPool;
Jamie Madill4d0bf552016-12-28 15:45:24 -05001304}
1305
Jamie Madill21061022018-07-12 23:56:30 -04001306angle::Result RendererVk::finish(vk::Context *context)
Jamie Madill4d0bf552016-12-28 15:45:24 -05001307{
Jamie Madill1f46bc12018-02-20 16:09:43 -05001308 if (!mCommandGraph.empty())
Jamie Madill49ac74b2017-12-21 14:42:33 -05001309 {
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001310 TRACE_EVENT0("gpu.angle", "RendererVk::finish");
1311
Luc Ferron1617e692018-07-11 11:08:19 -04001312 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1313 ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));
Jamie Madill0c0dc342017-03-24 14:18:51 -04001314
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001315 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001316 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
1317 getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001318
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001319 VkSubmitInfo submitInfo = {};
Jamie Madill49ac74b2017-12-21 14:42:33 -05001320 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001321 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
1322 submitInfo.pWaitSemaphores = waitSemaphores.data();
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001323 submitInfo.pWaitDstStageMask = waitStageMasks.data();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001324 submitInfo.commandBufferCount = 1;
Luc Ferron1617e692018-07-11 11:08:19 -04001325 submitInfo.pCommandBuffers = commandBatch.get().ptr();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001326 submitInfo.signalSemaphoreCount = 0;
1327 submitInfo.pSignalSemaphores = nullptr;
Jamie Madill4d0bf552016-12-28 15:45:24 -05001328
Jamie Madill21061022018-07-12 23:56:30 -04001329 ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBatch.get())));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001330 }
Jamie Madill4d0bf552016-12-28 15:45:24 -05001331
Jamie Madill4c26fc22017-02-24 11:04:10 -05001332 ASSERT(mQueue != VK_NULL_HANDLE);
Jamie Madill21061022018-07-12 23:56:30 -04001333 ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueue));
Jamie Madill0c0dc342017-03-24 14:18:51 -04001334 freeAllInFlightResources();
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001335
1336 if (mGpuEventsEnabled)
1337 {
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001338 // This loop should in practice execute once since the queue is already idle.
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001339 while (mInFlightGpuEventQueries.size() > 0)
1340 {
1341 ANGLE_TRY(checkCompletedGpuEvents(context));
1342 }
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001343 // Recalculate the CPU/GPU time difference to account for clock drifting. Avoid unnecessary
1344 // synchronization if there is no event to be adjusted (happens when finish() gets called
1345 // multiple times towards the end of the application).
1346 if (mGpuEvents.size() > 0)
1347 {
1348 ANGLE_TRY(synchronizeCpuGpuTime(context));
1349 }
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001350 }
1351
Jamie Madill7c985f52018-11-29 18:16:17 -05001352 return angle::Result::Continue;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001353}
1354
Jamie Madill0c0dc342017-03-24 14:18:51 -04001355void RendererVk::freeAllInFlightResources()
1356{
Jamie Madill49ac74b2017-12-21 14:42:33 -05001357 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill0c0dc342017-03-24 14:18:51 -04001358 {
Yuly Novikovb56ddbb2018-11-02 16:53:18 -04001359        // On device loss we need to wait for the fence to be signaled before destroying it.
1360 if (mDeviceLost)
1361 {
1362 VkResult status = batch.fence.wait(mDevice, kMaxFenceWaitTimeNs);
1363            // If the wait times out, it is probably not possible to recover from a lost device.
1364 ASSERT(status == VK_SUCCESS || status == VK_ERROR_DEVICE_LOST);
1365 }
Jamie Madill49ac74b2017-12-21 14:42:33 -05001366 batch.fence.destroy(mDevice);
Tobin Ehlis47ca1b22019-01-23 16:11:41 +00001367 batch.commandPool.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -04001368 }
1369 mInFlightCommands.clear();
1370
1371 for (auto &garbage : mGarbage)
1372 {
Jamie Madille88ec8e2017-10-31 17:18:14 -04001373 garbage.destroy(mDevice);
Jamie Madill0c0dc342017-03-24 14:18:51 -04001374 }
1375 mGarbage.clear();
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001376
1377 mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
Jamie Madill0c0dc342017-03-24 14:18:51 -04001378}
1379
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001380angle::Result RendererVk::checkCompletedCommands(vk::Context *context)
Jamie Madill4c26fc22017-02-24 11:04:10 -05001381{
Jamie Madill49ac74b2017-12-21 14:42:33 -05001382 int finishedCount = 0;
Jamie Madillf651c772017-02-21 15:03:51 -05001383
Jamie Madill49ac74b2017-12-21 14:42:33 -05001384 for (CommandBatch &batch : mInFlightCommands)
Jamie Madill4c26fc22017-02-24 11:04:10 -05001385 {
Yuly Novikov27780292018-11-09 11:19:49 -05001386 VkResult result = batch.fence.getStatus(mDevice);
1387 if (result == VK_NOT_READY)
1388 {
Jamie Madill0c0dc342017-03-24 14:18:51 -04001389 break;
Yuly Novikov27780292018-11-09 11:19:49 -05001390 }
1391 ANGLE_VK_TRY(context, result);
Jamie Madill49ac74b2017-12-21 14:42:33 -05001392
Jamie Madill49ac74b2017-12-21 14:42:33 -05001393 ASSERT(batch.serial > mLastCompletedQueueSerial);
1394 mLastCompletedQueueSerial = batch.serial;
Jamie Madill0c0dc342017-03-24 14:18:51 -04001395
Jamie Madill49ac74b2017-12-21 14:42:33 -05001396 batch.fence.destroy(mDevice);
Tobin Ehlis4a419142019-02-05 08:50:30 -07001397 TRACE_EVENT0("gpu.angle", "commandPool.destroy");
Tobin Ehlis47ca1b22019-01-23 16:11:41 +00001398 batch.commandPool.destroy(mDevice);
Jamie Madill49ac74b2017-12-21 14:42:33 -05001399 ++finishedCount;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001400 }
1401
Jamie Madill49ac74b2017-12-21 14:42:33 -05001402 mInFlightCommands.erase(mInFlightCommands.begin(), mInFlightCommands.begin() + finishedCount);
Jamie Madill0c0dc342017-03-24 14:18:51 -04001403
1404 size_t freeIndex = 0;
1405 for (; freeIndex < mGarbage.size(); ++freeIndex)
1406 {
Jamie Madill49ac74b2017-12-21 14:42:33 -05001407 if (!mGarbage[freeIndex].destroyIfComplete(mDevice, mLastCompletedQueueSerial))
Jamie Madill0c0dc342017-03-24 14:18:51 -04001408 break;
1409 }
1410
1411 // Remove the entries from the garbage list - they should be ready to go.
1412 if (freeIndex > 0)
1413 {
1414 mGarbage.erase(mGarbage.begin(), mGarbage.begin() + freeIndex);
Jamie Madillf651c772017-02-21 15:03:51 -05001415 }
1416
Jamie Madill7c985f52018-11-29 18:16:17 -05001417 return angle::Result::Continue;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001418}
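
// A worked example of the bookkeeping above (made-up serials): with mInFlightCommands holding
// batches {s3, s5, s7} and only the fences of s3 and s5 signaled, the loop stops at s7 and sets
// mLastCompletedQueueSerial = s5, erasing the first two batches. The garbage sweep then destroys
// every mGarbage entry whose serial is <= s5; entries are queued in submission order, so the
// first incomplete entry ends the sweep.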
1419
Jamie Madill21061022018-07-12 23:56:30 -04001420angle::Result RendererVk::submitFrame(vk::Context *context,
1421 const VkSubmitInfo &submitInfo,
1422 vk::CommandBuffer &&commandBuffer)
Jamie Madill4c26fc22017-02-24 11:04:10 -05001423{
Tobin Ehlis573f76b2018-05-03 11:10:44 -06001424 TRACE_EVENT0("gpu.angle", "RendererVk::submitFrame");
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001425 VkFenceCreateInfo fenceInfo = {};
Jamie Madillb980c562018-11-27 11:34:27 -05001426 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1427 fenceInfo.flags = 0;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001428
Tobin Ehlis47ca1b22019-01-23 16:11:41 +00001429 vk::Scoped<CommandBatch> scopedBatch(mDevice);
Jamie Madillbea35a62018-07-05 11:54:10 -04001430 CommandBatch &batch = scopedBatch.get();
Yuly Novikov27780292018-11-09 11:19:49 -05001431 ANGLE_VK_TRY(context, batch.fence.init(mDevice, fenceInfo));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001432
Jamie Madill21061022018-07-12 23:56:30 -04001433 ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, batch.fence.getHandle()));
Jamie Madill4c26fc22017-02-24 11:04:10 -05001434
1435 // Store this command buffer in the in-flight list.
Jamie Madill49ac74b2017-12-21 14:42:33 -05001436 batch.commandPool = std::move(mCommandPool);
1437 batch.serial = mCurrentQueueSerial;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001438
Jamie Madillbea35a62018-07-05 11:54:10 -04001439 mInFlightCommands.emplace_back(scopedBatch.release());
Jamie Madill0c0dc342017-03-24 14:18:51 -04001440
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001441    // The CPU should be throttled to keep mInFlightCommands from growing too fast. That is done on
1442 // swap() though, and there could be multiple submissions in between (through glFlush() calls),
Shahbaz Youssefi611bbaa2018-12-06 01:59:53 +01001443 // so the limit is larger than the expected number of images. The
1444 // InterleavedAttributeDataBenchmark perf test for example issues a large number of flushes.
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001445 ASSERT(mInFlightCommands.size() <= kInFlightCommandsLimit);
Jamie Madill0c0dc342017-03-24 14:18:51 -04001446
Jamie Madill85ca1892019-01-16 13:27:15 -05001447 nextSerial();
Jamie Madill0c0dc342017-03-24 14:18:51 -04001448
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001449 ANGLE_TRY(checkCompletedCommands(context));
Jamie Madill0c0dc342017-03-24 14:18:51 -04001450
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001451 if (mGpuEventsEnabled)
1452 {
1453 ANGLE_TRY(checkCompletedGpuEvents(context));
1454 }
1455
Jamie Madill49ac74b2017-12-21 14:42:33 -05001456 // Simply null out the command buffer here - it was allocated using the command pool.
1457 commandBuffer.releaseHandle();
1458
1459    // Reallocate the command pool for the next frame.
1460 // TODO(jmadill): Consider reusing command pools.
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001461 VkCommandPoolCreateInfo poolInfo = {};
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001462 poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001463 poolInfo.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001464 poolInfo.queueFamilyIndex = mCurrentQueueFamilyIndex;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001465
Tobin Ehlis47ca1b22019-01-23 16:11:41 +00001466 ANGLE_VK_TRY(context, mCommandPool.init(mDevice, poolInfo));
Jamie Madill7c985f52018-11-29 18:16:17 -05001467 return angle::Result::Continue;
Jamie Madill4c26fc22017-02-24 11:04:10 -05001468}
1469
Jamie Madill85ca1892019-01-16 13:27:15 -05001470void RendererVk::nextSerial()
1471{
1472 // Increment the queue serial. If this fails, we should restart ANGLE.
1473 mLastSubmittedQueueSerial = mCurrentQueueSerial;
1474 mCurrentQueueSerial = mQueueSerialFactory.generate();
1475
1476 // Notify the Contexts that they should be starting new command buffers.
1477 // We use one command pool per serial/submit associated with this VkQueue. We can also
1478 // have multiple Contexts sharing one VkQueue. In ContextVk::setupDraw we don't explicitly
1479 // check for a new serial when starting a new command buffer. We just check that the current
1480 // recording command buffer is valid. Thus we need to explicitly notify every other Context
1481    // using this VkQueue that their current command buffer is no longer valid.
1482 for (gl::Context *context : mDisplay->getContextSet())
1483 {
1484 ContextVk *contextVk = vk::GetImpl(context);
1485 contextVk->onCommandBufferFinished();
1486 }
1487}
1488
Jamie Madillaaca96e2018-06-12 10:19:48 -04001489bool RendererVk::isSerialInUse(Serial serial) const
Jamie Madill97760352017-11-09 13:08:29 -05001490{
1491 return serial > mLastCompletedQueueSerial;
1492}
1493
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001494angle::Result RendererVk::finishToSerial(vk::Context *context, Serial serial)
1495{
Shahbaz Youssefi82fddcb2019-01-18 14:27:43 -05001496 bool timedOut = false;
1497 angle::Result result = finishToSerialOrTimeout(context, serial, kMaxFenceWaitTimeNs, &timedOut);
1498
1499 // Don't tolerate timeout. If such a large wait time results in timeout, something's wrong.
1500 if (timedOut)
1501 {
1502 result = angle::Result::Stop;
1503 }
1504 return result;
1505}
1506
1507angle::Result RendererVk::finishToSerialOrTimeout(vk::Context *context,
1508 Serial serial,
1509 uint64_t timeout,
1510 bool *outTimedOut)
1511{
1512 *outTimedOut = false;
1513
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001514 if (!isSerialInUse(serial) || mInFlightCommands.empty())
1515 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001516 return angle::Result::Continue;
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001517 }
1518
1519    // Find the first batch with a serial equal to or greater than the given serial (note that
1520 // the batch serials are unique, otherwise upper-bound would have been necessary).
1521 size_t batchIndex = mInFlightCommands.size() - 1;
1522 for (size_t i = 0; i < mInFlightCommands.size(); ++i)
1523 {
1524 if (mInFlightCommands[i].serial >= serial)
1525 {
1526 batchIndex = i;
1527 break;
1528 }
1529 }
1530 const CommandBatch &batch = mInFlightCommands[batchIndex];
1531
1532    // Wait for it to finish.
Shahbaz Youssefi82fddcb2019-01-18 14:27:43 -05001533    VkResult status = batch.fence.wait(mDevice, timeout);
1534
1535 // If timed out, report it as such.
1536 if (status == VK_TIMEOUT)
1537 {
1538 *outTimedOut = true;
1539 return angle::Result::Continue;
1540 }
1541
1542 ANGLE_VK_TRY(context, status);
Shahbaz Youssefic4765aa2018-10-12 14:40:29 -04001543
1544 // Clean up finished batches.
1545 return checkCompletedCommands(context);
1546}
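
// Usage sketch (hypothetical caller; the 1-second timeout is illustrative):
//
//     bool timedOut = false;
//     ANGLE_TRY(renderer->finishToSerialOrTimeout(context, serial, 1'000'000'000ull, &timedOut));
//     if (timedOut)
//     {
//         // Tolerated here, unlike finishToSerial() above, which converts a timeout to Stop.
//     }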
1547
Jamie Madill21061022018-07-12 23:56:30 -04001548angle::Result RendererVk::getCompatibleRenderPass(vk::Context *context,
1549 const vk::RenderPassDesc &desc,
1550 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -05001551{
Jamie Madill21061022018-07-12 23:56:30 -04001552 return mRenderPassCache.getCompatibleRenderPass(context, mCurrentQueueSerial, desc,
Jamie Madill9f2a8612017-11-30 12:43:09 -05001553 renderPassOut);
1554}
1555
Jamie Madill21061022018-07-12 23:56:30 -04001556angle::Result RendererVk::getRenderPassWithOps(vk::Context *context,
1557 const vk::RenderPassDesc &desc,
1558 const vk::AttachmentOpsArray &ops,
1559 vk::RenderPass **renderPassOut)
Jamie Madill9f2a8612017-11-30 12:43:09 -05001560{
Jamie Madill21061022018-07-12 23:56:30 -04001561 return mRenderPassCache.getRenderPassWithOps(context, mCurrentQueueSerial, desc, ops,
Jamie Madillbef918c2017-12-13 13:11:30 -05001562 renderPassOut);
Jamie Madill9f2a8612017-11-30 12:43:09 -05001563}
1564
Jamie Madilla5e06072018-05-18 14:36:05 -04001565vk::CommandGraph *RendererVk::getCommandGraph()
Jamie Madill49ac74b2017-12-21 14:42:33 -05001566{
Jamie Madilla5e06072018-05-18 14:36:05 -04001567 return &mCommandGraph;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001568}
1569
Jamie Madill21061022018-07-12 23:56:30 -04001570angle::Result RendererVk::flushCommandGraph(vk::Context *context, vk::CommandBuffer *commandBatch)
Jamie Madill49ac74b2017-12-21 14:42:33 -05001571{
Jamie Madill21061022018-07-12 23:56:30 -04001572 return mCommandGraph.submitCommands(context, mCurrentQueueSerial, &mRenderPassCache,
Jamie Madill1f46bc12018-02-20 16:09:43 -05001573 &mCommandPool, commandBatch);
Jamie Madill49ac74b2017-12-21 14:42:33 -05001574}
1575
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001576angle::Result RendererVk::flush(vk::Context *context)
Jamie Madill49ac74b2017-12-21 14:42:33 -05001577{
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001578 if (mCommandGraph.empty())
1579 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001580 return angle::Result::Continue;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001581 }
1582
Shahbaz Youssefi61656022018-10-24 15:00:50 -04001583 TRACE_EVENT0("gpu.angle", "RendererVk::flush");
1584
Jamie Madillbea35a62018-07-05 11:54:10 -04001585 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1586 ANGLE_TRY(flushCommandGraph(context, &commandBatch.get()));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001587
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001588 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001589 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
1590 getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001591
1592 // On every flush, create a semaphore to be signaled. On the next submission, this semaphore
1593 // will be waited on.
1594 ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &mSubmitLastSignaledSemaphore));
Jamie Madill49ac74b2017-12-21 14:42:33 -05001595
Shahbaz Youssefi06270c92018-10-03 17:00:25 -04001596 VkSubmitInfo submitInfo = {};
Jamie Madill49ac74b2017-12-21 14:42:33 -05001597 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001598 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
1599 submitInfo.pWaitSemaphores = waitSemaphores.data();
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001600 submitInfo.pWaitDstStageMask = waitStageMasks.data();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001601 submitInfo.commandBufferCount = 1;
Jamie Madillbea35a62018-07-05 11:54:10 -04001602 submitInfo.pCommandBuffers = commandBatch.get().ptr();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001603 submitInfo.signalSemaphoreCount = 1;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001604 submitInfo.pSignalSemaphores = mSubmitLastSignaledSemaphore.getSemaphore()->ptr();
Jamie Madill49ac74b2017-12-21 14:42:33 -05001605
Jamie Madill21061022018-07-12 23:56:30 -04001606 ANGLE_TRY(submitFrame(context, submitInfo, commandBatch.release()));
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001607
Jamie Madill7c985f52018-11-29 18:16:17 -05001608 return angle::Result::Continue;
Jamie Madill49ac74b2017-12-21 14:42:33 -05001609}
1610
Jamie Madill78feddc2018-04-27 11:45:05 -04001611Serial RendererVk::issueShaderSerial()
Jamie Madillf2f6d372018-01-10 21:37:23 -05001612{
Jamie Madill78feddc2018-04-27 11:45:05 -04001613 return mShaderSerialFactory.generate();
Jamie Madillf2f6d372018-01-10 21:37:23 -05001614}
1615
Jamie Madill21061022018-07-12 23:56:30 -04001616angle::Result RendererVk::getDescriptorSetLayout(
1617 vk::Context *context,
Jamie Madill9b168d02018-06-13 13:25:32 -04001618 const vk::DescriptorSetLayoutDesc &desc,
1619 vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
1620{
Jamie Madill21061022018-07-12 23:56:30 -04001621 return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
Jamie Madill9b168d02018-06-13 13:25:32 -04001622}
1623
Jamie Madill21061022018-07-12 23:56:30 -04001624angle::Result RendererVk::getPipelineLayout(
1625 vk::Context *context,
Jamie Madill9b168d02018-06-13 13:25:32 -04001626 const vk::PipelineLayoutDesc &desc,
1627 const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
1628 vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
1629{
Jamie Madill21061022018-07-12 23:56:30 -04001630 return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
Jamie Madill9b168d02018-06-13 13:25:32 -04001631 pipelineLayoutOut);
1632}
1633
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001634angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
1635{
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001636 ASSERT(mPipelineCache.valid());
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001637
1638 if (--mPipelineCacheVkUpdateTimeout > 0)
1639 {
Jamie Madill7c985f52018-11-29 18:16:17 -05001640 return angle::Result::Continue;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001641 }
1642
1643 mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
1644
1645 // Get the size of the cache.
1646 size_t pipelineCacheSize = 0;
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001647 VkResult result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, nullptr);
Yuly Novikov27780292018-11-09 11:19:49 -05001648 if (result != VK_INCOMPLETE)
1649 {
1650 ANGLE_VK_TRY(displayVk, result);
1651 }
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001652
1653 angle::MemoryBuffer *pipelineCacheData = nullptr;
1654 ANGLE_VK_CHECK_ALLOC(displayVk,
1655 displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));
1656
1657 size_t originalPipelineCacheSize = pipelineCacheSize;
Jamie Madilldc65c5b2018-11-21 11:07:26 -05001658 result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001659    // Note: we currently don't accept VK_INCOMPLETE as we don't expect it (the full size of the cache
1660 // was determined just above), so receiving it hints at an implementation bug we would want
1661 // to know about early.
Yuly Novikov27780292018-11-09 11:19:49 -05001662 ASSERT(result != VK_INCOMPLETE);
1663 ANGLE_VK_TRY(displayVk, result);
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001664
1665 // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
1666 // the buffer to avoid leaking garbage memory.
1667 ASSERT(pipelineCacheSize <= originalPipelineCacheSize);
1668 if (pipelineCacheSize < originalPipelineCacheSize)
1669 {
1670 memset(pipelineCacheData->data() + pipelineCacheSize, 0,
1671 originalPipelineCacheSize - pipelineCacheSize);
1672 }
1673
1674 displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
1675
Jamie Madill7c985f52018-11-29 18:16:17 -05001676 return angle::Result::Continue;
Shahbaz Youssefi996628a2018-09-24 16:39:26 -04001677}
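
// The two getCacheData() calls above follow the standard Vulkan two-call idiom for
// variable-size queries; a minimal sketch against the raw API (error handling elided):
//
//     size_t size = 0;
//     vkGetPipelineCacheData(device, pipelineCache, &size, nullptr);      // query the size
//     std::vector<uint8_t> data(size);
//     vkGetPipelineCacheData(device, pipelineCache, &size, data.data());  // fetch the blob
//
// The driver may legitimately write fewer bytes than requested on the second call, which is why
// the code above zeroes the tail of the scratch buffer before handing it to the blob cache.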
1678
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001679angle::Result RendererVk::allocateSubmitWaitSemaphore(vk::Context *context,
1680 const vk::Semaphore **outSemaphore)
1681{
1682 ASSERT(mSubmitWaitSemaphores.size() < mSubmitWaitSemaphores.max_size());
1683
1684 vk::SemaphoreHelper semaphore;
1685 ANGLE_TRY(mSubmitSemaphorePool.allocateSemaphore(context, &semaphore));
1686
1687 mSubmitWaitSemaphores.push_back(std::move(semaphore));
1688 *outSemaphore = mSubmitWaitSemaphores.back().getSemaphore();
1689
Jamie Madill7c985f52018-11-29 18:16:17 -05001690 return angle::Result::Continue;
Shahbaz Youssefi3a482172018-10-11 10:34:44 -04001691}
1692
1693const vk::Semaphore *RendererVk::getSubmitLastSignaledSemaphore(vk::Context *context)
1694{
1695 const vk::Semaphore *semaphore = mSubmitLastSignaledSemaphore.getSemaphore();
1696
1697 // Return the semaphore to the pool (which will remain valid and unused until the
1698 // queue it's about to be waited on has finished execution). The caller is about
1699 // to wait on it.
1700 mSubmitSemaphorePool.freeSemaphore(context, &mSubmitLastSignaledSemaphore);
1701
1702 return semaphore;
1703}
1704
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001705angle::Result RendererVk::getTimestamp(vk::Context *context, uint64_t *timestampOut)
1706{
1707 // The intent of this function is to query the timestamp without stalling the GPU. Currently,
1708 // that seems impossible, so instead, we are going to make a small submission with just a
1709 // timestamp query. First, the disjoint timer query extension says:
1710 //
1711 // > This will return the GL time after all previous commands have reached the GL server but
1712 // have not yet necessarily executed.
1713 //
1714 // The previous commands are stored in the command graph at the moment and are not yet flushed.
1715 // The wording allows us to make a submission to get the timestamp without performing a flush.
1716 //
1717 // Second:
1718 //
1719 // > By using a combination of this synchronous get command and the asynchronous timestamp query
1720 // object target, applications can measure the latency between when commands reach the GL server
1721 // and when they are realized in the framebuffer.
1722 //
1723 // This fits with the above strategy as well, although inevitably we are possibly introducing a
1724 // GPU bubble. This function directly generates a command buffer and submits it instead of
1725 // using the other member functions. This is to avoid changing any state, such as the queue
1726 // serial.
1727
1728 // Create a query used to receive the GPU timestamp
1729 vk::Scoped<vk::DynamicQueryPool> timestampQueryPool(mDevice);
1730 vk::QueryHelper timestampQuery;
1731 ANGLE_TRY(timestampQueryPool.get().init(context, VK_QUERY_TYPE_TIMESTAMP, 1));
1732 ANGLE_TRY(timestampQueryPool.get().allocateQuery(context, &timestampQuery));
1733
1734 // Record the command buffer
1735 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1736 vk::CommandBuffer &commandBuffer = commandBatch.get();
1737
1738 VkCommandBufferAllocateInfo commandBufferInfo = {};
1739 commandBufferInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1740 commandBufferInfo.commandPool = mCommandPool.getHandle();
1741 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1742 commandBufferInfo.commandBufferCount = 1;
1743
Yuly Novikov27780292018-11-09 11:19:49 -05001744 ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001745
1746 VkCommandBufferBeginInfo beginInfo = {};
1747 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1748 beginInfo.flags = 0;
1749 beginInfo.pInheritanceInfo = nullptr;
1750
Yuly Novikov27780292018-11-09 11:19:49 -05001751 ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001752
1753 commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
1754 timestampQuery.getQuery(), 1);
1755 commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1756 timestampQuery.getQueryPool()->getHandle(),
1757 timestampQuery.getQuery());
1758
Yuly Novikov27780292018-11-09 11:19:49 -05001759 ANGLE_VK_TRY(context, commandBuffer.end());
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001760
1761 // Create fence for the submission
1762 VkFenceCreateInfo fenceInfo = {};
1763 fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
1764 fenceInfo.flags = 0;
1765
1766 vk::Scoped<vk::Fence> fence(mDevice);
Yuly Novikov27780292018-11-09 11:19:49 -05001767 ANGLE_VK_TRY(context, fence.get().init(mDevice, fenceInfo));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001768
1769 // Submit the command buffer
1770 VkSubmitInfo submitInfo = {};
1771 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1772 submitInfo.waitSemaphoreCount = 0;
1773 submitInfo.pWaitSemaphores = nullptr;
1774 submitInfo.pWaitDstStageMask = nullptr;
1775 submitInfo.commandBufferCount = 1;
1776 submitInfo.pCommandBuffers = commandBuffer.ptr();
1777 submitInfo.signalSemaphoreCount = 0;
1778 submitInfo.pSignalSemaphores = nullptr;
1779
1780 ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, fence.get().getHandle()));
1781
1782 // Wait for the submission to finish. Given no semaphores, there is hope that it would execute
1783 // in parallel with what's already running on the GPU.
Yuly Novikov27780292018-11-09 11:19:49 -05001784 ANGLE_VK_TRY(context, fence.get().wait(mDevice, kMaxFenceWaitTimeNs));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001785
1786 // Get the query results
1787 constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;
1788
Yuly Novikov27780292018-11-09 11:19:49 -05001789 ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
1790 mDevice, timestampQuery.getQuery(), 1, sizeof(*timestampOut),
1791 timestampOut, sizeof(*timestampOut), queryFlags));
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001792
1793 timestampQueryPool.get().freeQuery(context, &timestampQuery);
1794
Shahbaz Youssefi5904ee32019-01-25 11:15:16 -05001795 // Convert results to nanoseconds.
1796 *timestampOut = static_cast<uint64_t>(
1797 *timestampOut * static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod));
1798
Jamie Madill7c985f52018-11-29 18:16:17 -05001799 return angle::Result::Continue;
Shahbaz Youssefi749589f2018-10-25 12:48:49 -04001800}
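
// Usage sketch (hypothetical caller):
//
//     uint64_t gpuTimeNs = 0;
//     ANGLE_TRY(renderer->getTimestamp(context, &gpuTimeNs));
//     // gpuTimeNs is already scaled by timestampPeriod, i.e. expressed in nanoseconds.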
1801
Shahbaz Youssefi96bd8fd2018-11-30 14:30:18 -05001802// These functions first check the mandatory format support table, and fall back to querying the
1803// device (if necessary) to test the availability of the requested feature bits.
1804bool RendererVk::hasLinearTextureFormatFeatureBits(VkFormat format,
1805 const VkFormatFeatureFlags featureBits)
1806{
1807 return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
1808}
1809
1810bool RendererVk::hasTextureFormatFeatureBits(VkFormat format,
1811 const VkFormatFeatureFlags featureBits)
1812{
1813 return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
1814}
1815
1816bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
1817{
1818 return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
1819}
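
// Usage sketch (hypothetical caller) checking whether a format supports optimally-tiled
// sampling before selecting it for a texture:
//
//     if (renderer->hasTextureFormatFeatureBits(VK_FORMAT_R8G8B8A8_UNORM,
//                                               VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
//     {
//         // Safe to create an optimally-tiled sampled image with this format.
//     }
//
// The lookup consults the mandatory-support table first and only queries the device on a miss
// (see hasFormatFeatureBits below).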
1820
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001821angle::Result RendererVk::synchronizeCpuGpuTime(vk::Context *context)
1822{
1823 ASSERT(mGpuEventsEnabled);
1824
1825 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1826 ASSERT(platform);
1827
1828 // To synchronize CPU and GPU times, we need to get the CPU timestamp as close as possible to
1829 // the GPU timestamp. The process of getting the GPU timestamp is as follows:
1830 //
1831 // CPU GPU
1832 //
1833 // Record command buffer
1834 // with timestamp query
1835 //
1836 // Submit command buffer
1837 //
1838 // Post-submission work Begin execution
1839 //
Yuly Novikov0546b532019-02-25 22:47:17 +00001840    //           ????                    Write timestamp Tgpu
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001841 //
1842 // ???? End execution
1843 //
1844 // ???? Return query results
1845 //
1846 // ????
1847 //
1848 // Get query results
1849 //
1850 // The areas of unknown work (????) on the CPU indicate that the CPU may or may not have
1851 // finished post-submission work while the GPU is executing in parallel. With no further work,
1852    // querying CPU timestamps before submission and after getting query results gives bounds on
1853 // Tgpu, which could be quite large.
1854 //
1855 // Using VkEvents, the GPU can be made to wait for the CPU and vice versa, in an effort to
1856 // reduce this range. This function implements the following procedure:
1857 //
1858 // CPU GPU
1859 //
1860 // Record command buffer
1861 // with timestamp query
1862 //
1863 // Submit command buffer
1864 //
1865 // Post-submission work Begin execution
1866 //
1867 // ???? Set Event GPUReady
1868 //
1869 // Wait on Event GPUReady Wait on Event CPUReady
1870 //
1871 // Get CPU Time Ts Wait on Event CPUReady
1872 //
1873 // Set Event CPUReady Wait on Event CPUReady
1874 //
1875 // Get CPU Time Tcpu Get GPU Time Tgpu
1876 //
1877 // Wait on Event GPUDone Set Event GPUDone
1878 //
1879 // Get CPU Time Te End Execution
1880 //
1881 // Idle Return query results
1882 //
1883 // Get query results
1884 //
1885 // If Te-Ts > epsilon, a GPU or CPU interruption can be assumed and the operation can be
1886 // retried. Once Te-Ts < epsilon, Tcpu can be taken to presumably match Tgpu. Finding an
1887 // epsilon that's valid for all devices may be difficult, so the loop can be performed only a
1888    // limited number of times, with the Tcpu,Tgpu pair corresponding to the smallest Te-Ts used for
1889 // calibration.
1890 //
1891 // Note: Once VK_EXT_calibrated_timestamps is ubiquitous, this should be redone.
1892
1893 // Make sure nothing is running
1894 ASSERT(mCommandGraph.empty());
1895
1896 TRACE_EVENT0("gpu.angle", "RendererVk::synchronizeCpuGpuTime");
1897
1898 // Create a query used to receive the GPU timestamp
1899 vk::QueryHelper timestampQuery;
1900 ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &timestampQuery));
1901
1902 // Create the three events
1903 VkEventCreateInfo eventCreateInfo = {};
1904 eventCreateInfo.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;
1905 eventCreateInfo.flags = 0;
1906
1907 vk::Scoped<vk::Event> cpuReady(mDevice), gpuReady(mDevice), gpuDone(mDevice);
Yuly Novikov27780292018-11-09 11:19:49 -05001908 ANGLE_VK_TRY(context, cpuReady.get().init(mDevice, eventCreateInfo));
1909 ANGLE_VK_TRY(context, gpuReady.get().init(mDevice, eventCreateInfo));
1910 ANGLE_VK_TRY(context, gpuDone.get().init(mDevice, eventCreateInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001911
1912 constexpr uint32_t kRetries = 10;
1913
1914 // Time suffixes used are S for seconds and Cycles for cycles
1915 double tightestRangeS = 1e6f;
1916 double TcpuS = 0;
1917 uint64_t TgpuCycles = 0;
1918 for (uint32_t i = 0; i < kRetries; ++i)
1919 {
1920 // Reset the events
Yuly Novikov27780292018-11-09 11:19:49 -05001921 ANGLE_VK_TRY(context, cpuReady.get().reset(mDevice));
1922 ANGLE_VK_TRY(context, gpuReady.get().reset(mDevice));
1923 ANGLE_VK_TRY(context, gpuDone.get().reset(mDevice));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001924
1925 // Record the command buffer
1926 vk::Scoped<vk::CommandBuffer> commandBatch(mDevice);
1927 vk::CommandBuffer &commandBuffer = commandBatch.get();
1928
1929 VkCommandBufferAllocateInfo commandBufferInfo = {};
1930 commandBufferInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1931 commandBufferInfo.commandPool = mCommandPool.getHandle();
1932 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1933 commandBufferInfo.commandBufferCount = 1;
1934
Yuly Novikov27780292018-11-09 11:19:49 -05001935 ANGLE_VK_TRY(context, commandBuffer.init(mDevice, commandBufferInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001936
1937 VkCommandBufferBeginInfo beginInfo = {};
1938 beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
1939 beginInfo.flags = 0;
1940 beginInfo.pInheritanceInfo = nullptr;
1941
Yuly Novikov27780292018-11-09 11:19:49 -05001942 ANGLE_VK_TRY(context, commandBuffer.begin(beginInfo));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001943
Shahbaz Youssefi82fddcb2019-01-18 14:27:43 -05001944 commandBuffer.setEvent(gpuReady.get().getHandle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001945 commandBuffer.waitEvents(1, cpuReady.get().ptr(), VK_PIPELINE_STAGE_HOST_BIT,
1946 VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, nullptr, 0, nullptr, 0,
1947 nullptr);
1948
1949 commandBuffer.resetQueryPool(timestampQuery.getQueryPool()->getHandle(),
1950 timestampQuery.getQuery(), 1);
1951 commandBuffer.writeTimestamp(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
1952 timestampQuery.getQueryPool()->getHandle(),
1953 timestampQuery.getQuery());
1954
Shahbaz Youssefi82fddcb2019-01-18 14:27:43 -05001955 commandBuffer.setEvent(gpuDone.get().getHandle(), VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001956
Yuly Novikov27780292018-11-09 11:19:49 -05001957 ANGLE_VK_TRY(context, commandBuffer.end());
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001958
1959 // Submit the command buffer
1960 angle::FixedVector<VkSemaphore, kMaxWaitSemaphores> waitSemaphores;
1961 angle::FixedVector<VkPipelineStageFlags, kMaxWaitSemaphores> waitStageMasks;
1962 getSubmitWaitSemaphores(context, &waitSemaphores, &waitStageMasks);
1963
1964 VkSubmitInfo submitInfo = {};
1965 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1966 submitInfo.waitSemaphoreCount = static_cast<uint32_t>(waitSemaphores.size());
1967 submitInfo.pWaitSemaphores = waitSemaphores.data();
1968 submitInfo.pWaitDstStageMask = waitStageMasks.data();
1969 submitInfo.commandBufferCount = 1;
1970 submitInfo.pCommandBuffers = commandBuffer.ptr();
1971 submitInfo.signalSemaphoreCount = 0;
1972 submitInfo.pSignalSemaphores = nullptr;
1973
1974 ANGLE_TRY(submitFrame(context, submitInfo, std::move(commandBuffer)));
1975
1976 // Wait for GPU to be ready. This is a short busy wait.
Yuly Novikov27780292018-11-09 11:19:49 -05001977 VkResult result = VK_EVENT_RESET;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001978 do
1979 {
Yuly Novikov27780292018-11-09 11:19:49 -05001980 result = gpuReady.get().getStatus(mDevice);
1981 if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
1982 {
1983 ANGLE_VK_TRY(context, result);
1984 }
1985 } while (result == VK_EVENT_RESET);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001986
1987 double TsS = platform->monotonicallyIncreasingTime(platform);
1988
1989 // Tell the GPU to go ahead with the timestamp query.
Yuly Novikov27780292018-11-09 11:19:49 -05001990 ANGLE_VK_TRY(context, cpuReady.get().set(mDevice));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04001991 double cpuTimestampS = platform->monotonicallyIncreasingTime(platform);
1992
1993 // Wait for GPU to be done. Another short busy wait.
1994 do
1995 {
Yuly Novikov27780292018-11-09 11:19:49 -05001996 result = gpuDone.get().getStatus(mDevice);
1997 if (result != VK_EVENT_SET && result != VK_EVENT_RESET)
1998 {
1999 ANGLE_VK_TRY(context, result);
2000 }
2001 } while (result == VK_EVENT_RESET);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002002
2003 double TeS = platform->monotonicallyIncreasingTime(platform);
2004
2005 // Get the query results
2006 ANGLE_TRY(finishToSerial(context, getLastSubmittedQueueSerial()));
2007
2008 constexpr VkQueryResultFlags queryFlags = VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT;
2009
2010 uint64_t gpuTimestampCycles = 0;
Yuly Novikov27780292018-11-09 11:19:49 -05002011 ANGLE_VK_TRY(context, timestampQuery.getQueryPool()->getResults(
2012 mDevice, timestampQuery.getQuery(), 1, sizeof(gpuTimestampCycles),
2013 &gpuTimestampCycles, sizeof(gpuTimestampCycles), queryFlags));
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002014
2015 // Use the first timestamp queried as origin.
2016 if (mGpuEventTimestampOrigin == 0)
2017 {
2018 mGpuEventTimestampOrigin = gpuTimestampCycles;
2019 }
2020
2021 // Take these CPU and GPU timestamps if there is better confidence.
2022 double confidenceRangeS = TeS - TsS;
2023 if (confidenceRangeS < tightestRangeS)
2024 {
2025 tightestRangeS = confidenceRangeS;
2026 TcpuS = cpuTimestampS;
2027 TgpuCycles = gpuTimestampCycles;
2028 }
2029 }
2030
2031 mGpuEventQueryPool.freeQuery(context, &timestampQuery);
2032
2033 // timestampPeriod gives nanoseconds/cycle.
2034 double TgpuS = (TgpuCycles - mGpuEventTimestampOrigin) *
2035 static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) /
2036 1'000'000'000.0;
2037
2038 flushGpuEvents(TgpuS, TcpuS);
2039
2040 mGpuClockSync.gpuTimestampS = TgpuS;
2041 mGpuClockSync.cpuTimestampS = TcpuS;
2042
Jamie Madill7c985f52018-11-29 18:16:17 -05002043 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002044}
2045
2046angle::Result RendererVk::traceGpuEventImpl(vk::Context *context,
2047 vk::CommandBuffer *commandBuffer,
2048 char phase,
2049 const char *name)
2050{
2051 ASSERT(mGpuEventsEnabled);
2052
2053 GpuEventQuery event;
2054
2055 event.name = name;
2056 event.phase = phase;
2057 event.serial = mCurrentQueueSerial;
2058
2059 ANGLE_TRY(mGpuEventQueryPool.allocateQuery(context, &event.queryPoolIndex, &event.queryIndex));
2060
2061 commandBuffer->resetQueryPool(
2062 mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex, 1);
2063 commandBuffer->writeTimestamp(
2064 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
2065 mGpuEventQueryPool.getQueryPool(event.queryPoolIndex)->getHandle(), event.queryIndex);
2066
2067 mInFlightGpuEventQueries.push_back(std::move(event));
2068
Jamie Madill7c985f52018-11-29 18:16:17 -05002069 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002070}
2071
2072angle::Result RendererVk::checkCompletedGpuEvents(vk::Context *context)
2073{
2074 ASSERT(mGpuEventsEnabled);
2075
2076 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
2077 ASSERT(platform);
2078
2079 int finishedCount = 0;
2080
2081 for (GpuEventQuery &eventQuery : mInFlightGpuEventQueries)
2082 {
2083 // Only check the timestamp query if the submission has finished.
2084 if (eventQuery.serial > mLastCompletedQueueSerial)
2085 {
2086 break;
2087 }
2088
2089 // See if the results are available.
2090 uint64_t gpuTimestampCycles = 0;
Yuly Novikov27780292018-11-09 11:19:49 -05002091 VkResult result = mGpuEventQueryPool.getQueryPool(eventQuery.queryPoolIndex)
2092 ->getResults(mDevice, eventQuery.queryIndex, 1,
2093 sizeof(gpuTimestampCycles), &gpuTimestampCycles,
2094 sizeof(gpuTimestampCycles), VK_QUERY_RESULT_64_BIT);
2095 if (result == VK_NOT_READY)
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002096 {
2097 break;
2098 }
Yuly Novikov27780292018-11-09 11:19:49 -05002099 ANGLE_VK_TRY(context, result);
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002100
2101 mGpuEventQueryPool.freeQuery(context, eventQuery.queryPoolIndex, eventQuery.queryIndex);
2102
2103 GpuEvent event;
2104 event.gpuTimestampCycles = gpuTimestampCycles;
2105 event.name = eventQuery.name;
2106 event.phase = eventQuery.phase;
2107
2108 mGpuEvents.emplace_back(event);
2109
2110 ++finishedCount;
2111 }
2112
2113 mInFlightGpuEventQueries.erase(mInFlightGpuEventQueries.begin(),
2114 mInFlightGpuEventQueries.begin() + finishedCount);
2115
Jamie Madill7c985f52018-11-29 18:16:17 -05002116 return angle::Result::Continue;
Shahbaz Youssefi25224e72018-10-22 11:56:02 -04002117}
2118
2119void RendererVk::flushGpuEvents(double nextSyncGpuTimestampS, double nextSyncCpuTimestampS)
2120{
2121 if (mGpuEvents.size() == 0)
2122 {
2123 return;
2124 }
2125
2126 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
2127 ASSERT(platform);
2128
2129 // Find the slope of the clock drift for adjustment
2130 double lastGpuSyncTimeS = mGpuClockSync.gpuTimestampS;
2131 double lastGpuSyncDiffS = mGpuClockSync.cpuTimestampS - mGpuClockSync.gpuTimestampS;
2132 double gpuSyncDriftSlope = 0;
2133
2134 double nextGpuSyncTimeS = nextSyncGpuTimestampS;
2135 double nextGpuSyncDiffS = nextSyncCpuTimestampS - nextSyncGpuTimestampS;
2136
2137 // No gpu trace events should have been generated before the clock sync, so if there is no
2138 // "previous" clock sync, there should be no gpu events (i.e. the function early-outs above).
2139 ASSERT(mGpuClockSync.gpuTimestampS != std::numeric_limits<double>::max() &&
2140 mGpuClockSync.cpuTimestampS != std::numeric_limits<double>::max());
2141
2142 gpuSyncDriftSlope =
2143 (nextGpuSyncDiffS - lastGpuSyncDiffS) / (nextGpuSyncTimeS - lastGpuSyncTimeS);
2144
2145 for (const GpuEvent &event : mGpuEvents)
2146 {
2147 double gpuTimestampS =
2148 (event.gpuTimestampCycles - mGpuEventTimestampOrigin) *
2149 static_cast<double>(mPhysicalDeviceProperties.limits.timestampPeriod) * 1e-9;
2150
2151 // Account for clock drift.
2152 gpuTimestampS += lastGpuSyncDiffS + gpuSyncDriftSlope * (gpuTimestampS - lastGpuSyncTimeS);
2153
2154 // Generate the trace now that the GPU timestamp is available and clock drifts are accounted
2155 // for.
2156 static long long eventId = 1;
2157 static const unsigned char *categoryEnabled =
2158 TRACE_EVENT_API_GET_CATEGORY_ENABLED("gpu.angle.gpu");
2159 platform->addTraceEvent(platform, event.phase, categoryEnabled, event.name, eventId++,
2160 gpuTimestampS, 0, nullptr, nullptr, nullptr, TRACE_EVENT_FLAG_NONE);
2161 }
2162
2163 mGpuEvents.clear();
2164}
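
// A worked example of the drift correction above (made-up numbers): if the previous sync
// measured a CPU-GPU offset of 0.5 ms at GPU time 10 s and the next sync measures 0.7 ms at
// 20 s, then gpuSyncDriftSlope = (0.7 ms - 0.5 ms) / 10 s = 0.02 ms/s, and an event at GPU
// time 15 s is shifted by 0.5 ms + 0.02 ms/s * 5 s = 0.6 ms onto the CPU clock.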
2165
Shahbaz Youssefi96bd8fd2018-11-30 14:30:18 -05002166template <VkFormatFeatureFlags VkFormatProperties::*features>
2167bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
2168{
2169 ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
2170 VkFormatProperties &deviceProperties = mFormatProperties[format];
2171
2172 if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
2173 {
2174 // If we don't have the actual device features, see if the requested features are mandatory.
2175 // If so, there's no need to query the device.
2176 const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
2177 if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
2178 {
2179 return true;
2180 }
2181
2182 // Otherwise query the format features and cache it.
2183 vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
2184 }
2185
2186 return IsMaskFlagSet(deviceProperties.*features, featureBits);
2187}
2188
Jamie Madillaaca96e2018-06-12 10:19:48 -04002189uint32_t GetUniformBufferDescriptorCount()
2190{
2191 return kUniformBufferDescriptorsPerDescriptorSet;
2192}
2193
Jamie Madill9e54b5a2016-05-25 12:57:39 -04002194} // namespace rx