/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */

#include "vulkan/vulkan.h"

#include <cassert>
#include <memory>
#include <mutex>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "vk_layer_data.h"
#include "vk_safe_struct.h"
#include "vk_layer_utils.h"

#pragma once

namespace unique_objects {

// All increments must be guarded by global_lock
static uint64_t global_unique_id = 1;
static std::unordered_map<uint64_t, uint64_t> unique_id_mapping; // Map uniqueID to actual object handle

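// Handle-wrapping scheme: each handle returned to the application is replaced by a unique 64-bit ID.  The
// ID-to-handle pairing lives in unique_id_mapping, global_unique_id supplies the next ID, and WrapNew()/Unwrap()
// below perform the translation.  Both globals are protected by global_lock.
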
struct TEMPLATE_STATE {
    VkDescriptorUpdateTemplateKHR desc_update_template;
    safe_VkDescriptorUpdateTemplateCreateInfoKHR create_info;

    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo)
        : desc_update_template(update_template), create_info(*pCreateInfo) {}
};

struct instance_layer_data {
    VkInstance instance;

    debug_report_data *report_data;
    std::vector<VkDebugReportCallbackEXT> logging_callback;
    VkLayerInstanceDispatchTable dispatch_table = {};

    // The following are for keeping track of the temporary callbacks that can be used in vkCreateInstance and vkDestroyInstance:
    uint32_t num_tmp_callbacks;
    VkDebugReportCallbackCreateInfoEXT *tmp_dbg_create_infos;
    VkDebugReportCallbackEXT *tmp_callbacks;
};

struct layer_data {
    instance_layer_data *instance_data;

    debug_report_data *report_data;
    VkLayerDispatchTable dispatch_table = {};

    std::unordered_map<uint64_t, std::unique_ptr<TEMPLATE_STATE>> desc_template_map;

    bool wsi_enabled;
    VkPhysicalDevice gpu;

    struct SubpassesUsageStates {
        std::unordered_set<uint32_t> subpasses_using_color_attachment;
        std::unordered_set<uint32_t> subpasses_using_depthstencil_attachment;
    };
    // Uses unwrapped handles
    std::unordered_map<VkRenderPass, SubpassesUsageStates> renderpasses_states;

    // Map of wrapped swapchain handles to arrays of wrapped swapchain image IDs.
    // Each swapchain has an immutable list of wrapped swapchain image IDs -- always return these IDs if they exist
    // (see the illustrative sketch that follows this struct).
    std::unordered_map<VkSwapchainKHR, std::vector<VkImage>> swapchain_wrapped_image_handle_map;

    layer_data() : wsi_enabled(false), gpu(VK_NULL_HANDLE){};
};

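// Illustrative sketch only: one way a GetSwapchainImagesKHR intercept might consult the map above, returning the
// previously wrapped image IDs when they already exist.  The dev_data pointer and the copy logic are assumptions
// for illustration, not code defined in this header.
//
//     auto &wrapped_images = dev_data->swapchain_wrapped_image_handle_map[swapchain];  // keyed by the wrapped handle
//     if (!wrapped_images.empty() && *pSwapchainImageCount <= wrapped_images.size()) {
//         std::copy_n(wrapped_images.begin(), *pSwapchainImageCount, pSwapchainImages);  // hand back the same IDs
//     }
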
static std::unordered_map<void *, instance_layer_data *> instance_layer_data_map;
static std::unordered_map<void *, layer_data *> layer_data_map;

static std::mutex global_lock; // Protect map accesses and unique_id increments

struct GenericHeader {
    VkStructureType sType;
    void *pNext;
};

template <typename T>
bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
    assert(target != nullptr);

    const GenericHeader *ext_struct = reinterpret_cast<const GenericHeader *>(target->pNext);

    while (ext_struct != nullptr) {
        if (ext_struct->sType == ext_type) {
            return true;
        }

        ext_struct = reinterpret_cast<const GenericHeader *>(ext_struct->pNext);
    }

    return false;
}

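// Example usage (illustrative only -- the intercept and create-info variable shown here are hypothetical):
//
//     // Has the application chained a VkDebugReportCallbackCreateInfoEXT onto this VkInstanceCreateInfo?
//     if (ContainsExtStruct(pCreateInfo, VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT)) {
//         // ...stash the temporary callbacks for use during vkCreateInstance/vkDestroyInstance...
//     }
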
// Unwrap a handle -- must hold lock!
template <typename HandleType>
HandleType Unwrap(HandleType wrappedHandle) {
    // TODO: don't use operator[] here.
    return (HandleType)unique_id_mapping[reinterpret_cast<uint64_t const &>(wrappedHandle)];
}

// Wrap a newly created handle with a new unique ID, and return the new ID -- must hold lock!
template <typename HandleType>
HandleType WrapNew(HandleType newlyCreatedHandle) {
    auto unique_id = global_unique_id++;
    unique_id_mapping[unique_id] = reinterpret_cast<uint64_t const &>(newlyCreatedHandle);
    return (HandleType)unique_id;
}

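// Illustrative sketch of the wrap/unwrap pattern inside an intercepted entry point.  The CreateSampler intercept
// and the GetLayerDataPtr/get_dispatch_key helper names are assumptions from the Vulkan layer framework, not
// definitions made in this file.
//
//     layer_data *dev_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map);
//     VkResult result = dev_data->dispatch_table.CreateSampler(device, pCreateInfo, pAllocator, pSampler);
//     if (result == VK_SUCCESS) {
//         std::lock_guard<std::mutex> lock(global_lock);  // WrapNew requires global_lock
//         *pSampler = WrapNew(*pSampler);                  // the app sees a unique ID, not the driver handle
//     }
//
//     // Later, before passing the handle back down to the driver:
//     //     std::lock_guard<std::mutex> lock(global_lock);
//     //     VkSampler driver_sampler = Unwrap(app_visible_sampler);
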
} // namespace unique_objects