/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#pragma once

#include "vulkan/vulkan.h"

#include <cassert>
#include <memory>
#include <mutex>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "vk_layer_data.h"
#include "vk_safe_struct.h"
#include "vk_layer_utils.h"

namespace unique_objects {

// All increments must be guarded by global_lock
static uint64_t global_unique_id = 1;

struct TEMPLATE_STATE {
    VkDescriptorUpdateTemplateKHR desc_update_template;
    safe_VkDescriptorUpdateTemplateCreateInfoKHR create_info;

    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo)
        : desc_update_template(update_template), create_info(*pCreateInfo) {}
};
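
// A sketch of how a TEMPLATE_STATE entry might be recorded when the layer intercepts
// vkCreateDescriptorUpdateTemplateKHR (illustrative only -- 'dev_data' and the surrounding
// dispatch/locking code are assumptions, not definitions from this header):
//
//     safe_VkDescriptorUpdateTemplateCreateInfoKHR safe_ci(pCreateInfo);
//     std::lock_guard<std::mutex> lock(global_lock);
//     *pDescriptorUpdateTemplate = WrapNew(dev_data, *pDescriptorUpdateTemplate);
//     const uint64_t template_id = reinterpret_cast<uint64_t &>(*pDescriptorUpdateTemplate);
//     dev_data->desc_template_map[template_id].reset(new TEMPLATE_STATE(*pDescriptorUpdateTemplate, &safe_ci));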
46
Chris Forbes5279a8c2017-05-02 16:26:23 -070047struct instance_layer_data {
Chia-I Wu16570472016-05-17 07:57:15 +080048 VkInstance instance;
49
Mark Lobodzinski8a2305d2016-08-25 14:49:38 -060050 debug_report_data *report_data;
51 std::vector<VkDebugReportCallbackEXT> logging_callback;
Chris Forbes44c05302017-05-02 16:42:55 -070052 VkLayerInstanceDispatchTable dispatch_table = {};
Mark Lobodzinski8a2305d2016-08-25 14:49:38 -060053
54 // The following are for keeping track of the temporary callbacks that can
55 // be used in vkCreateInstance and vkDestroyInstance:
56 uint32_t num_tmp_callbacks;
57 VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos;
58 VkDebugReportCallbackEXT *tmp_callbacks;
59
Chris Forbes5279a8c2017-05-02 16:26:23 -070060 std::unordered_map<uint64_t, uint64_t> unique_id_mapping; // Map uniqueID to actual object handle
Chris Forbes5279a8c2017-05-02 16:26:23 -070061};

struct layer_data {
    instance_layer_data *instance_data;

    debug_report_data *report_data;
    VkLayerDispatchTable dispatch_table = {};

    std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;

    bool wsi_enabled;
    std::unordered_map<uint64_t, uint64_t> unique_id_mapping;  // Map uniqueID to actual object handle
    VkPhysicalDevice gpu;

    struct SubpassesUsageStates {
        std::unordered_set<uint32_t> subpasses_using_color_attachment;
        std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
    };
    // Uses unwrapped handles
    std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;

    // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs
    // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
    std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;

    layer_data() : wsi_enabled(false), gpu(VK_NULL_HANDLE) {}
};
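
// A sketch of how swapchain_wrapped_image_handle_map is intended to be used when the layer
// intercepts vkGetSwapchainImagesKHR (illustrative only -- 'dev_data', 'wrapped_swapchain', and the
// surrounding dispatch code are assumptions). If a wrapped-image list already exists for the
// swapchain, those IDs are returned; otherwise each VkImage returned by the driver is wrapped once
// and appended, so repeated queries hand back the same wrapped handles:
//
//     auto &wrapped_images = dev_data->swapchain_wrapped_image_handle_map[wrapped_swapchain];
//     if (wrapped_images.empty() && pSwapchainImages != nullptr) {
//         for (uint32_t i = 0; i < *pSwapchainImageCount; i++) {
//             wrapped_images.push_back(WrapNew(dev_data, pSwapchainImages[i]));
//         }
//     }
//     // ...then copy wrapped_images back out through pSwapchainImages.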

static std::unordered_map<void *, instance_layer_data *> instance_layer_data_map;
static std::unordered_map<void *, layer_data *> layer_data_map;

static std::mutex global_lock;  // Protect map accesses and unique_id increments

struct GenericHeader {
    VkStructureType sType;
    void *pNext;
};

template <typename T>
bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
    assert(target != nullptr);

    const GenericHeader *ext_struct = reinterpret_cast<const GenericHeader *>(target->pNext);

    while (ext_struct != nullptr) {
        if (ext_struct->sType == ext_type) {
            return true;
        }

        ext_struct = reinterpret_cast<const GenericHeader *>(ext_struct->pNext);
    }

    return false;
}
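
// Example use of ContainsExtStruct: before unwrapping a create/allocate info, a layer entry point
// can check the pNext chain for an extension structure that itself carries wrapped handles
// (illustrative only -- the specific structure type is just one example):
//
//     if (ContainsExtStruct(pAllocateInfo, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV)) {
//         // The chain contains a VkDedicatedAllocationMemoryAllocateInfoNV whose image/buffer
//         // members are wrapped handles and must be unwrapped before calling down the chain.
//     }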

// Unwrap a handle. The caller must hold global_lock.
template <typename HandleType, typename MapType>
HandleType Unwrap(MapType *layer_data, HandleType wrappedHandle) {
    // Look up with find() rather than operator[] so an unknown handle does not insert a spurious zero entry
    const auto iter = layer_data->unique_id_mapping.find(reinterpret_cast<uint64_t const &>(wrappedHandle));
    if (iter == layer_data->unique_id_mapping.end()) return (HandleType)0;
    return (HandleType)iter->second;
}

// Wrap a newly created handle with a new unique ID, and return the new ID. The caller must hold global_lock.
template <typename HandleType, typename MapType>
HandleType WrapNew(MapType *layer_data, HandleType newlyCreatedHandle) {
    auto unique_id = global_unique_id++;
    layer_data->unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
    return (HandleType)unique_id;
}
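
// Typical wrap/unwrap pattern in a layer entry point (illustrative only -- 'dev_data' and the
// specific create call are assumptions, not code from this header). Handles passed in by the
// application are unwrapped under the lock before calling down the chain, and handles returned by
// the driver are wrapped before being handed back:
//
//     {
//         std::lock_guard<std::mutex> lock(global_lock);
//         local_create_info.buffer = Unwrap(dev_data, local_create_info.buffer);
//     }
//     VkResult result = dev_data->dispatch_table.CreateBufferView(device, &local_create_info, pAllocator, pView);
//     if (result == VK_SUCCESS) {
//         std::lock_guard<std::mutex> lock(global_lock);
//         *pView = WrapNew(dev_data, *pView);
//     }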

}  // namespace unique_objects