/*
 * Copyright (C) 2016 Google, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <cassert>
#include <array>
#include <iostream>
#include <string>
#include <sstream>
#include <set>
#include "Helpers.h"
#include "Shell.h"
#include "Game.h"

Shell::Shell(Game &game)
    : game_(game), settings_(game.settings()), ctx_(),
      game_tick_(1.0f / settings_.ticks_per_second), game_time_(game_tick_)
{
    // require generic WSI extensions
    instance_extensions_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    device_extensions_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    // require "standard" validation layers
    if (settings_.validate) {
        device_layers_.push_back("VK_LAYER_LUNARG_standard_validation");
        instance_layers_.push_back("VK_LAYER_LUNARG_standard_validation");

        instance_extensions_.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }
}

void Shell::log(LogPriority priority, const char *msg)
{
    std::ostream &st = (priority >= LOG_ERR) ? std::cerr : std::cout;
    st << msg << "\n";
}

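// Note on the vk:: helpers used below (presumably declared in Helpers.h): the
// dispatch table is filled in three stages -- init_dispatch_table_top() with
// the loader function returned by the platform's load_vk(), then
// init_dispatch_table_middle() once the VkInstance exists, and finally
// init_dispatch_table_bottom() in create_context() once the VkDevice exists.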
void Shell::init_vk()
{
    vk::init_dispatch_table_top(load_vk());

    init_instance();
    vk::init_dispatch_table_middle(ctx_.instance, false);

    init_debug_report();
    init_physical_dev();
}

void Shell::cleanup_vk()
{
    if (settings_.validate)
        vk::DestroyDebugReportCallbackEXT(ctx_.instance, ctx_.debug_report, nullptr);

    vk::DestroyInstance(ctx_.instance, nullptr);
}

bool Shell::debug_report_callback(VkDebugReportFlagsEXT flags,
                                  VkDebugReportObjectTypeEXT obj_type,
                                  uint64_t object,
                                  size_t location,
                                  int32_t msg_code,
                                  const char *layer_prefix,
                                  const char *msg)
{
    LogPriority prio = LOG_WARN;
    if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
        prio = LOG_ERR;
    else if (flags & (VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT))
        prio = LOG_WARN;
    else if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
        prio = LOG_INFO;
    else if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
        prio = LOG_DEBUG;

    std::stringstream ss;
    ss << layer_prefix << ": " << msg;

    log(prio, ss.str().c_str());

    return false;
}

void Shell::assert_all_instance_layers() const
{
    // enumerate instance layers
    std::vector<VkLayerProperties> layers;
    vk::enumerate(layers);

    std::set<std::string> layer_names;
    for (const auto &layer : layers)
        layer_names.insert(layer.layerName);

    // all listed instance layers are required
    for (const auto &name : instance_layers_) {
        if (layer_names.find(name) == layer_names.end()) {
            std::stringstream ss;
            ss << "instance layer " << name << " is missing";
            throw std::runtime_error(ss.str());
        }
    }
}

void Shell::assert_all_instance_extensions() const
{
    // enumerate instance extensions
    std::vector<VkExtensionProperties> exts;
    vk::enumerate(nullptr, exts);

    std::set<std::string> ext_names;
    for (const auto &ext : exts)
        ext_names.insert(ext.extensionName);

    // all listed instance extensions are required
    for (const auto &name : instance_extensions_) {
        if (ext_names.find(name) == ext_names.end()) {
            std::stringstream ss;
            ss << "instance extension " << name << " is missing";
            throw std::runtime_error(ss.str());
        }
    }
}

bool Shell::has_all_device_layers(VkPhysicalDevice phy) const
{
    // enumerate device layers
    std::vector<VkLayerProperties> layers;
    vk::enumerate(phy, layers);

    std::set<std::string> layer_names;
    for (const auto &layer : layers)
        layer_names.insert(layer.layerName);

    // all listed device layers are required
    for (const auto &name : device_layers_) {
        if (layer_names.find(name) == layer_names.end())
            return false;
    }

    return true;
}

bool Shell::has_all_device_extensions(VkPhysicalDevice phy) const
{
    // enumerate device extensions
    std::vector<VkExtensionProperties> exts;
    vk::enumerate(phy, nullptr, exts);

    std::set<std::string> ext_names;
    for (const auto &ext : exts)
        ext_names.insert(ext.extensionName);

    // all listed device extensions are required
    for (const auto &name : device_extensions_) {
        if (ext_names.find(name) == ext_names.end())
            return false;
    }

    return true;
}

void Shell::init_instance()
{
    assert_all_instance_layers();
    assert_all_instance_extensions();

    VkApplicationInfo app_info = {};
    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    app_info.pApplicationName = settings_.name.c_str();
    app_info.applicationVersion = 0;
    app_info.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo instance_info = {};
    instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_info.pApplicationInfo = &app_info;
    instance_info.enabledLayerCount = static_cast<uint32_t>(instance_layers_.size());
    instance_info.ppEnabledLayerNames = instance_layers_.data();
    instance_info.enabledExtensionCount = static_cast<uint32_t>(instance_extensions_.size());
    instance_info.ppEnabledExtensionNames = instance_extensions_.data();

    vk::assert_success(vk::CreateInstance(&instance_info, nullptr, &ctx_.instance));
}

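// Register a VK_EXT_debug_report callback so validation messages are routed
// through Shell::log().  pfnCallback is assumed to resolve to a static
// trampoline declared in Shell.h that unpacks pUserData back into a Shell*
// and forwards to the member debug_report_callback() above.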
void Shell::init_debug_report()
{
    if (!settings_.validate)
        return;

    VkDebugReportCallbackCreateInfoEXT debug_report_info = {};
    debug_report_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;

    debug_report_info.flags = VK_DEBUG_REPORT_WARNING_BIT_EXT |
                              VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
                              VK_DEBUG_REPORT_ERROR_BIT_EXT;
    if (settings_.validate_verbose) {
        // verbose mode adds informational and debug messages on top of the defaults
        debug_report_info.flags |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
                                   VK_DEBUG_REPORT_DEBUG_BIT_EXT;
    }

    debug_report_info.pfnCallback = debug_report_callback;
    debug_report_info.pUserData = reinterpret_cast<void *>(this);

    vk::assert_success(vk::CreateDebugReportCallbackEXT(ctx_.instance,
                &debug_report_info, nullptr, &ctx_.debug_report));
}

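// Pick the first physical device that has every required device layer and
// extension, a queue family with VK_QUEUE_GRAPHICS_BIT for the game, and a
// queue family that the platform-specific can_present() accepts for
// presentation.  The two families may or may not be the same.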
void Shell::init_physical_dev()
{
    // enumerate physical devices
    std::vector<VkPhysicalDevice> phys;
    vk::assert_success(vk::enumerate(ctx_.instance, phys));

    ctx_.physical_dev = VK_NULL_HANDLE;
    for (auto phy : phys) {
        if (!has_all_device_layers(phy) || !has_all_device_extensions(phy))
            continue;

        // get queue properties
        std::vector<VkQueueFamilyProperties> queues;
        vk::get(phy, queues);

        int game_queue_family = -1, present_queue_family = -1;
        for (uint32_t i = 0; i < queues.size(); i++) {
            const VkQueueFamilyProperties &q = queues[i];

            // game queues only require GRAPHICS
            const VkFlags game_queue_flags = VK_QUEUE_GRAPHICS_BIT;
            if (game_queue_family < 0 &&
                (q.queueFlags & game_queue_flags) == game_queue_flags)
                game_queue_family = i;

            // present queue must support the surface
            if (present_queue_family < 0 && can_present(phy, i))
                present_queue_family = i;

            if (game_queue_family >= 0 && present_queue_family >= 0)
                break;
        }

        if (game_queue_family >= 0 && present_queue_family >= 0) {
            ctx_.physical_dev = phy;
            ctx_.game_queue_family = game_queue_family;
            ctx_.present_queue_family = present_queue_family;
            break;
        }
    }

    if (ctx_.physical_dev == VK_NULL_HANDLE)
        throw std::runtime_error("failed to find any capable Vulkan physical device");
}

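// Bring up everything that depends on the VkDevice: the device itself, its
// queues, the back-buffer sync objects, and the presentation surface/format.
// The swapchain itself is created lazily by resize_swapchain(), which is
// expected to be called by the platform-specific shell once the window size
// is known.  A platform shell's run loop is assumed to drive this roughly as:
//
//     init_vk(); create_context(); resize_swapchain(w, h);
//     loop { add_game_time(dt); acquire_back_buffer(); present_back_buffer(); }
//     destroy_context(); cleanup_vk();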
void Shell::create_context()
{
    create_dev();
    vk::init_dispatch_table_bottom(ctx_.instance, ctx_.dev);

    vk::GetDeviceQueue(ctx_.dev, ctx_.game_queue_family, 0, &ctx_.game_queue);
    vk::GetDeviceQueue(ctx_.dev, ctx_.present_queue_family, 0, &ctx_.present_queue);

    create_back_buffers();

    // initialize ctx_.{surface,format} before attach_shell
    create_swapchain();

    game_.attach_shell(*this);
}

void Shell::destroy_context()
{
    if (ctx_.dev == VK_NULL_HANDLE)
        return;

    vk::DeviceWaitIdle(ctx_.dev);

    destroy_swapchain();

    game_.detach_shell();

    destroy_back_buffers();

    ctx_.game_queue = VK_NULL_HANDLE;
    ctx_.present_queue = VK_NULL_HANDLE;

    vk::DestroyDevice(ctx_.dev, nullptr);
    ctx_.dev = VK_NULL_HANDLE;
}

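// Create the logical device with one VkDeviceQueueCreateInfo for the game
// queue family (settings_.queue_count queues) and, only when the present
// queue family is different, a second one with a single present queue.
// All device features are left disabled.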
void Shell::create_dev()
{
    VkDeviceCreateInfo dev_info = {};
    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;

    const std::vector<float> queue_priorities(settings_.queue_count, 0.0f);
    std::array<VkDeviceQueueCreateInfo, 2> queue_info = {};
    queue_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queue_info[0].queueFamilyIndex = ctx_.game_queue_family;
    queue_info[0].queueCount = settings_.queue_count;
    queue_info[0].pQueuePriorities = queue_priorities.data();

    if (ctx_.game_queue_family != ctx_.present_queue_family) {
        queue_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_info[1].queueFamilyIndex = ctx_.present_queue_family;
        queue_info[1].queueCount = 1;
        queue_info[1].pQueuePriorities = queue_priorities.data();

        dev_info.queueCreateInfoCount = 2;
    } else {
        dev_info.queueCreateInfoCount = 1;
    }

    dev_info.pQueueCreateInfos = queue_info.data();

    dev_info.enabledLayerCount = static_cast<uint32_t>(device_layers_.size());
    dev_info.ppEnabledLayerNames = device_layers_.data();
    dev_info.enabledExtensionCount = static_cast<uint32_t>(device_extensions_.size());
    dev_info.ppEnabledExtensionNames = device_extensions_.data();

    // disable all features
    VkPhysicalDeviceFeatures features = {};
    dev_info.pEnabledFeatures = &features;

    vk::assert_success(vk::CreateDevice(ctx_.physical_dev, &dev_info, nullptr, &ctx_.dev));
}

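// Create back_buffer_count + 1 BackBuffers, each owning an acquire semaphore,
// a render semaphore, and a present fence.  The fences start signaled so the
// first acquire_back_buffer() does not block.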
void Shell::create_back_buffers()
{
    VkSemaphoreCreateInfo sem_info = {};
    sem_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

    VkFenceCreateInfo fence_info = {};
    fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;

    // A BackBuffer is used to track which swapchain image and its associated
    // sync primitives are busy.  Having more BackBuffers than swapchain
    // images may allow us to replace a CPU wait on present_fence with a GPU
    // wait on acquire_semaphore.
    const int count = settings_.back_buffer_count + 1;
    for (int i = 0; i < count; i++) {
        BackBuffer buf = {};
        vk::assert_success(vk::CreateSemaphore(ctx_.dev, &sem_info, nullptr, &buf.acquire_semaphore));
        vk::assert_success(vk::CreateSemaphore(ctx_.dev, &sem_info, nullptr, &buf.render_semaphore));
        vk::assert_success(vk::CreateFence(ctx_.dev, &fence_info, nullptr, &buf.present_fence));

        ctx_.back_buffers.push(buf);
    }
}

void Shell::destroy_back_buffers()
{
    while (!ctx_.back_buffers.empty()) {
        const auto &buf = ctx_.back_buffers.front();

        vk::DestroySemaphore(ctx_.dev, buf.acquire_semaphore, nullptr);
        vk::DestroySemaphore(ctx_.dev, buf.render_semaphore, nullptr);
        vk::DestroyFence(ctx_.dev, buf.present_fence, nullptr);

        ctx_.back_buffers.pop();
    }
}

void Shell::create_swapchain()
{
    ctx_.surface = create_surface(ctx_.instance);

    VkBool32 supported;
    vk::assert_success(vk::GetPhysicalDeviceSurfaceSupportKHR(ctx_.physical_dev,
                ctx_.present_queue_family, ctx_.surface, &supported));
    // this should be guaranteed by the platform-specific can_present call
    assert(supported);

    std::vector<VkSurfaceFormatKHR> formats;
    vk::get(ctx_.physical_dev, ctx_.surface, formats);
    ctx_.format = formats[0];

    // defer to resize_swapchain()
    ctx_.swapchain = VK_NULL_HANDLE;
    ctx_.extent.width = (uint32_t) -1;
    ctx_.extent.height = (uint32_t) -1;
}

void Shell::destroy_swapchain()
{
    if (ctx_.swapchain != VK_NULL_HANDLE) {
        game_.detach_swapchain();

        vk::DestroySwapchainKHR(ctx_.dev, ctx_.swapchain, nullptr);
        ctx_.swapchain = VK_NULL_HANDLE;
    }

    vk::DestroySurfaceKHR(ctx_.instance, ctx_.surface, nullptr);
    ctx_.surface = VK_NULL_HANDLE;
}

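// (Re)create the swapchain for the current surface size.  The extent comes
// from the surface capabilities (falling back to the hints when the surface
// reports 0xFFFFFFFF, meaning the swapchain decides) and is clamped to the
// supported range; the image count is clamped likewise; MAILBOX or IMMEDIATE
// is preferred over FIFO depending on the vsync setting; and any previous
// swapchain is passed as oldSwapchain and destroyed after the new one exists.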
void Shell::resize_swapchain(uint32_t width_hint, uint32_t height_hint)
{
    VkSurfaceCapabilitiesKHR caps;
    vk::assert_success(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(ctx_.physical_dev,
                ctx_.surface, &caps));

    VkExtent2D extent = caps.currentExtent;
    // use the hints when the surface leaves the extent up to the swapchain
    if (extent.width == (uint32_t) -1) {
        extent.width = width_hint;
        extent.height = height_hint;
    }
    // clamp the width to the supported range to protect against broken hints
    if (extent.width < caps.minImageExtent.width)
        extent.width = caps.minImageExtent.width;
    else if (extent.width > caps.maxImageExtent.width)
        extent.width = caps.maxImageExtent.width;
    // clamp the height as well
    if (extent.height < caps.minImageExtent.height)
        extent.height = caps.minImageExtent.height;
    else if (extent.height > caps.maxImageExtent.height)
        extent.height = caps.maxImageExtent.height;

    if (ctx_.extent.width == extent.width && ctx_.extent.height == extent.height)
        return;

    uint32_t image_count = settings_.back_buffer_count;
    if (image_count < caps.minImageCount)
        image_count = caps.minImageCount;
    else if (caps.maxImageCount > 0 && image_count > caps.maxImageCount)
        image_count = caps.maxImageCount;   // maxImageCount of 0 means no upper limit

    assert(caps.supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
    assert(caps.supportedTransforms & caps.currentTransform);
    assert(caps.supportedCompositeAlpha & (VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
                                           VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR));
    VkCompositeAlphaFlagBitsKHR composite_alpha =
        (caps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) ?
        VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR : VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;

    std::vector<VkPresentModeKHR> modes;
    vk::get(ctx_.physical_dev, ctx_.surface, modes);

    // FIFO is the only mode universally supported
    VkPresentModeKHR mode = VK_PRESENT_MODE_FIFO_KHR;
    for (auto m : modes) {
        if ((settings_.vsync && m == VK_PRESENT_MODE_MAILBOX_KHR) ||
            (!settings_.vsync && m == VK_PRESENT_MODE_IMMEDIATE_KHR)) {
            mode = m;
            break;
        }
    }

    VkSwapchainCreateInfoKHR swapchain_info = {};
    swapchain_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchain_info.surface = ctx_.surface;
    swapchain_info.minImageCount = image_count;
    swapchain_info.imageFormat = ctx_.format.format;
    swapchain_info.imageColorSpace = ctx_.format.colorSpace;
    swapchain_info.imageExtent = extent;
    swapchain_info.imageArrayLayers = 1;
    swapchain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

    std::vector<uint32_t> queue_families(1, ctx_.game_queue_family);
    if (ctx_.game_queue_family != ctx_.present_queue_family) {
        queue_families.push_back(ctx_.present_queue_family);

        swapchain_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
        swapchain_info.queueFamilyIndexCount = (uint32_t)queue_families.size();
        swapchain_info.pQueueFamilyIndices = queue_families.data();
    } else {
        swapchain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    }

    swapchain_info.preTransform = caps.currentTransform;
    swapchain_info.compositeAlpha = composite_alpha;
    swapchain_info.presentMode = mode;
    swapchain_info.clipped = true;
    swapchain_info.oldSwapchain = ctx_.swapchain;

    vk::assert_success(vk::CreateSwapchainKHR(ctx_.dev, &swapchain_info, nullptr, &ctx_.swapchain));
    ctx_.extent = extent;

    // destroy the old swapchain
    if (swapchain_info.oldSwapchain != VK_NULL_HANDLE) {
        game_.detach_swapchain();

        vk::DeviceWaitIdle(ctx_.dev);
        vk::DestroySwapchainKHR(ctx_.dev, swapchain_info.oldSwapchain, nullptr);
    }

    game_.attach_swapchain();
}

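// Accumulate elapsed wall-clock time and convert it into fixed-size game
// ticks, running at most three ticks per call (presumably to avoid a spiral
// of catch-up updates after a long stall).  For example, with
// ticks_per_second = 30 the tick length is ~0.0333 s; adding 0.05 s runs one
// on_tick() and carries the remaining ~0.0167 s over to the next frame.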
void Shell::add_game_time(float time)
{
    int max_ticks = 3;

    if (!settings_.no_tick)
        game_time_ += time;

    while (game_time_ >= game_tick_ && max_ticks--) {
        game_.on_tick();
        game_time_ -= game_tick_;
    }
}

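// Take the least recently used BackBuffer off the queue, wait for its present
// fence (which guarantees its semaphores are no longer in use), reset the
// fence, and acquire the next swapchain image into it.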
void Shell::acquire_back_buffer()
{
    // acquire just once when not presenting
    if (settings_.no_present &&
        ctx_.acquired_back_buffer.acquire_semaphore != VK_NULL_HANDLE)
        return;

    auto &buf = ctx_.back_buffers.front();

    // wait until acquire and render semaphores are waited/unsignaled
    vk::assert_success(vk::WaitForFences(ctx_.dev, 1, &buf.present_fence,
                true, UINT64_MAX));
    // reset the fence
    vk::assert_success(vk::ResetFences(ctx_.dev, 1, &buf.present_fence));

    vk::assert_success(vk::AcquireNextImageKHR(ctx_.dev, ctx_.swapchain,
                UINT64_MAX, buf.acquire_semaphore, VK_NULL_HANDLE,
                &buf.image_index));

    ctx_.acquired_back_buffer = buf;
    ctx_.back_buffers.pop();
}

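// Render the frame, then queue the present.  When no_render is set the
// present waits directly on the acquire semaphore; otherwise it waits on the
// render semaphore that the game is expected to signal from its own
// submission.  The empty submit afterwards signals present_fence, which
// acquire_back_buffer() uses to know when this BackBuffer can be reused.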
void Shell::present_back_buffer()
{
    const auto &buf = ctx_.acquired_back_buffer;

    if (!settings_.no_render)
        game_.on_frame(game_time_ / game_tick_);

    if (settings_.no_present) {
        fake_present();
        return;
    }

    VkPresentInfoKHR present_info = {};
    present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present_info.waitSemaphoreCount = 1;
    present_info.pWaitSemaphores = (settings_.no_render) ?
        &buf.acquire_semaphore : &buf.render_semaphore;
    present_info.swapchainCount = 1;
    present_info.pSwapchains = &ctx_.swapchain;
    present_info.pImageIndices = &buf.image_index;

    vk::assert_success(vk::QueuePresentKHR(ctx_.present_queue, &present_info));

    vk::assert_success(vk::QueueSubmit(ctx_.present_queue, 0, nullptr, buf.present_fence));
    ctx_.back_buffers.push(buf);
}

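// Stand-in for presentation when no_present is set: submit a GPU-side wait on
// the render semaphore that immediately re-signals the acquire semaphore, so
// the single acquired BackBuffer can be reused frame after frame without a
// swapchain present.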
void Shell::fake_present()
{
    const auto &buf = ctx_.acquired_back_buffer;

    assert(settings_.no_present);

    // wait on the render semaphore and signal the acquire semaphore
    if (!settings_.no_render) {
        VkPipelineStageFlags stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        VkSubmitInfo submit_info = {};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.waitSemaphoreCount = 1;
        submit_info.pWaitSemaphores = &buf.render_semaphore;
        submit_info.pWaitDstStageMask = &stage;
        submit_info.signalSemaphoreCount = 1;
        submit_info.pSignalSemaphores = &buf.acquire_semaphore;
        vk::assert_success(vk::QueueSubmit(ctx_.game_queue, 1, &submit_info, VK_NULL_HANDLE));
    }

    // push the buffer back just once so destroy_back_buffers can clean it up
    if (buf.acquire_semaphore != ctx_.back_buffers.back().acquire_semaphore)
        ctx_.back_buffers.push(buf);
}