/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#pragma once

#include "vulkan/vulkan.h"

#include <cassert>
#include <cstdint>
#include <memory>
#include <mutex>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "vk_layer_data.h"
#include "vk_safe_struct.h"
#include "vk_layer_utils.h"
namespace unique_objects {

// All increments must be guarded by global_lock
static uint64_t global_unique_id = 1;
static std::unordered_map<uint64_t, uint64_t> unique_id_mapping;  // Map unique ID to actual object handle

struct TEMPLATE_STATE {
    VkDescriptorUpdateTemplateKHR desc_update_template;
    safe_VkDescriptorUpdateTemplateCreateInfoKHR create_info;

    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo)
        : desc_update_template(update_template), create_info(*pCreateInfo) {}
};
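
// Illustrative sketch (hypothetical helper, not one of this layer's real entry points): when a descriptor
// update template is created, its create info is deep-copied into a TEMPLATE_STATE so it can be consulted
// later when descriptors are written through the template. This assumes the generated safe-struct
// constructor that takes the raw create-info pointer; a real intercept would typically also unwrap any
// handles inside the create info before storing it.
static inline std::unique_ptr<TEMPLATE_STATE> ExampleMakeTemplateState(VkDescriptorUpdateTemplateKHR wrapped_template,
                                                                       const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo) {
    safe_VkDescriptorUpdateTemplateCreateInfoKHR safe_create_info(pCreateInfo);  // Deep copy of the create info
    return std::unique_ptr<TEMPLATE_STATE>(new TEMPLATE_STATE(wrapped_template, &safe_create_info));
}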

struct instance_layer_data {
    VkInstance instance;

    debug_report_data *report_data;
    std::vector<VkDebugReportCallbackEXT> logging_callback;
    VkLayerInstanceDispatchTable dispatch_table = {};

    // The following are for keeping track of the temporary callbacks that can be used in vkCreateInstance and vkDestroyInstance:
    uint32_t num_tmp_callbacks;
    VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos;
    VkDebugReportCallbackEXT *tmp_callbacks;
};

struct layer_data {
    instance_layer_data *instance_data;

    debug_report_data *report_data;
    VkLayerDispatchTable dispatch_table = {};

    std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;

    bool wsi_enabled;
    VkPhysicalDevice gpu;

    struct SubpassesUsageStates {
        std::unordered_set<uint32_t> subpasses_using_color_attachment;
        std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
    };
    // Uses unwrapped handles
    std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;

    // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
    // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
    std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;

    layer_data() : wsi_enabled(false), gpu(VK_NULL_HANDLE) {}
};

static std::unordered_map<void *, instance_layer_data *> instance_layer_data_map;
static std::unordered_map<void *, layer_data *> layer_data_map;

static std::mutex global_lock;  // Protect map accesses and unique_id increments

struct GenericHeader {
    VkStructureType sType;
    void *pNext;
};

template <typename T>
bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
    assert(target != nullptr);

    const GenericHeader *ext_struct = reinterpret_cast<const GenericHeader *>(target->pNext);

    while (ext_struct != nullptr) {
        if (ext_struct->sType == ext_type) {
            return true;
        }

        ext_struct = reinterpret_cast<const GenericHeader *>(ext_struct->pNext);
    }

    return false;
}
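
// Usage sketch (hypothetical helper, for illustration only): walk an allocation info's pNext chain to see
// whether a particular extension structure is present before deciding how the call needs to be handled.
static inline bool ExampleUsesDedicatedAllocationNV(const VkMemoryAllocateInfo *pAllocateInfo) {
    // VK_NV_dedicated_allocation chains a VkDedicatedAllocationMemoryAllocateInfoNV onto VkMemoryAllocateInfo
    return ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV);
}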

// Unwrap a handle -- caller must hold the global lock
template <typename HandleType>
HandleType Unwrap(HandleType wrappedHandle) {
    // TODO: don't use operator[] here.
    return (HandleType)unique_id_mapping[reinterpret_cast<uint64_t const &>(wrappedHandle)];
}

// Wrap a newly created handle with a new unique ID, and return the new ID -- caller must hold the global lock
template <typename HandleType>
HandleType WrapNew(HandleType newlyCreatedHandle) {
    auto unique_id = global_unique_id++;
    unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
    return (HandleType)unique_id;
}
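
// Illustrative sketch (hypothetical helpers, not this layer's actual generated intercepts): a typical create
// call wraps the driver handle before returning it to the application, and a typical destroy call unwraps
// the ID and erases its mapping before calling down. Both follow the global_lock rule noted above.
static inline VkResult ExampleCreateSampler(layer_data *dev_data, VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
    VkResult result = dev_data->dispatch_table.CreateSampler(device, pCreateInfo, pAllocator, pSampler);
    if (result == VK_SUCCESS) {
        std::lock_guard<std::mutex> lock(global_lock);
        *pSampler = WrapNew(*pSampler);  // Hand the application a unique ID instead of the driver handle
    }
    return result;
}

static inline void ExampleDestroySampler(layer_data *dev_data, VkDevice device, VkSampler sampler,
                                         const VkAllocationCallbacks *pAllocator) {
    std::unique_lock<std::mutex> lock(global_lock);
    uint64_t sampler_id = reinterpret_cast<uint64_t &>(sampler);
    sampler = Unwrap(sampler);            // Recover the driver handle
    unique_id_mapping.erase(sampler_id);  // The unique ID is no longer valid after destruction
    lock.unlock();
    dev_data->dispatch_table.DestroySampler(device, sampler, pAllocator);
}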

}  // namespace unique_objects