Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 1 | /* Copyright (c) 2015-2016 The Khronos Group Inc. |
| 2 | * Copyright (c) 2015-2016 Valve Corporation |
| 3 | * Copyright (c) 2015-2016 LunarG, Inc. |
| 4 | * Copyright (C) 2015-2016 Google Inc. |
| 5 | * |
| 6 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 7 | * you may not use this file except in compliance with the License. |
| 8 | * You may obtain a copy of the License at |
| 9 | * |
| 10 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 11 | * |
| 12 | * Unless required by applicable law or agreed to in writing, software |
| 13 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | * See the License for the specific language governing permissions and |
| 16 | * limitations under the License. |
| 17 | * |
| 18 | * Author: Courtney Goeltzenleuchter <courtneygo@google.com> |
| 19 | * Author: Tobin Ehlis <tobine@google.com> |
| 20 | * Author: Chris Forbes <chrisf@ijw.co.nz> |
| 21 | * Author: Mark Lobodzinski <mark@lunarg.com> |
| 22 | */ |
| 23 | #ifndef CORE_VALIDATION_TYPES_H_ |
| 24 | #define CORE_VALIDATION_TYPES_H_ |
| 25 | |
John Zulauf | f0d0639 | 2018-02-16 13:07:24 -0700 | [diff] [blame] | 26 | #include "hash_vk_types.h" |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 27 | #include "vk_safe_struct.h" |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 28 | #include "vulkan/vulkan.h" |
Mark Lobodzinski | f8d5448 | 2017-01-17 13:09:40 -0700 | [diff] [blame] | 29 | #include "vk_validation_error_messages.h" |
Mark Lobodzinski | 90224de | 2017-01-26 15:23:11 -0700 | [diff] [blame] | 30 | #include "vk_layer_logging.h" |
Mark Lobodzinski | 3382637 | 2017-04-13 11:10:11 -0600 | [diff] [blame] | 31 | #include "vk_object_types.h" |
Mark Lobodzinski | 28426ae | 2017-06-01 07:56:38 -0600 | [diff] [blame] | 32 | #include "vk_extension_helper.h" |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 33 | #include <atomic> |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 34 | #include <functional> |
Tobin Ehlis | cebc4c0 | 2016-08-22 10:10:43 -0600 | [diff] [blame] | 35 | #include <map> |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 36 | #include <string.h> |
| 37 | #include <unordered_map> |
| 38 | #include <unordered_set> |
| 39 | #include <vector> |
Mark Lobodzinski | 9ef5d56 | 2017-01-27 12:28:30 -0700 | [diff] [blame] | 40 | #include <memory> |
Mark Lobodzinski | ab9be28 | 2017-02-09 12:01:27 -0700 | [diff] [blame] | 41 | #include <list> |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 42 | |
// Fwd declarations -- including descriptor_set.h creates an ugly include loop
namespace cvdescriptorset {
class DescriptorSetLayoutDef;
class DescriptorSetLayout;
class DescriptorSet;
}  // namespace cvdescriptorset

// Fwd declaration -- full definition lives with the command buffer tracking code
struct GLOBAL_CB_NODE;
| 51 | |
// Tracks progression of the Vulkan two-call query idiom (first call gets a count,
// second call with that count gets the details)
enum CALL_STATE {
    UNCALLED,       // Function has not been called
    QUERY_COUNT,    // Function called once to query a count
    QUERY_DETAILS,  // Function called w/ a count to query details
};
| 57 | |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 58 | class BASE_NODE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 59 | public: |
Tobin Ehlis | 08edee6 | 2016-06-23 17:03:56 -0600 | [diff] [blame] | 60 | // Track when object is being used by an in-flight command buffer |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 61 | std::atomic_int in_use; |
Tobin Ehlis | 08edee6 | 2016-06-23 17:03:56 -0600 | [diff] [blame] | 62 | // Track command buffers that this object is bound to |
| 63 | // binding initialized when cmd referencing object is bound to command buffer |
| 64 | // binding removed when command buffer is reset or destroyed |
| 65 | // When an object is destroyed, any bound cbs are set to INVALID |
Tobin Ehlis | 965cd8a | 2016-06-24 14:41:20 -0600 | [diff] [blame] | 66 | std::unordered_set<GLOBAL_CB_NODE *> cb_bindings; |
Tobin Ehlis | 2f85ff5 | 2016-09-07 15:18:08 -0600 | [diff] [blame] | 67 | |
| 68 | BASE_NODE() { in_use.store(0); }; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 69 | }; |
| 70 | |
// Track command pools and their command buffers
struct COMMAND_POOL_NODE : public BASE_NODE {
    VkCommandPoolCreateFlags createFlags;  // Flags this pool was created with
    uint32_t queueFamilyIndex;             // Queue family index this pool was created for
    // Cmd buffers allocated from this pool
    std::unordered_set<VkCommandBuffer> commandBuffers;
};
| 78 | |
// Generic wrapper for vulkan objects: a raw 64-bit handle plus the enum that
// identifies which kind of Vulkan object the handle refers to
struct VK_OBJECT {
    uint64_t handle;
    VulkanObjectType type;
};
| 84 | |
Tobin Ehlis | 96f1d60 | 2016-07-08 12:33:45 -0600 | [diff] [blame] | 85 | inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; } |
| 86 | |
namespace std {
// Hash support so VK_OBJECT can be used as a key in unordered containers;
// combines the handle and type hashes with XOR
template <>
struct hash<VK_OBJECT> {
    size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
};
}  // namespace std
Tobin Ehlis | 96f1d60 | 2016-07-08 12:33:45 -0600 | [diff] [blame] | 93 | |
// Cached physical-device properties plus its per-queue-family properties
class PHYS_DEV_PROPERTIES_NODE {
   public:
    VkPhysicalDeviceProperties properties;
    std::vector<VkQueueFamilyProperties> queue_family_properties;
};
| 99 | |
// Flags describing requirements imposed by the pipeline on a descriptor. These
// can't be checked at pipeline creation time as they depend on the Image or
// ImageView bound.
enum descriptor_req {
    // One bit per VkImageViewType value (the enum value is used as the shift amount)
    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,

    // Mask covering every view-type bit above
    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_END_RANGE + 1)) - 1,

    // Sample-count requirement bits live just above the view-type bits
    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_END_RANGE,
    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,
};
| 117 | |
Tobin Ehlis | bd711bd | 2016-10-12 14:27:30 -0600 | [diff] [blame] | 118 | struct DESCRIPTOR_POOL_STATE : BASE_NODE { |
Tobin Ehlis | 7c701c0 | 2016-05-26 11:20:13 -0600 | [diff] [blame] | 119 | VkDescriptorPool pool; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 120 | uint32_t maxSets; // Max descriptor sets allowed in this pool |
| 121 | uint32_t availableSets; // Available descriptor sets in this pool |
Tobin Ehlis | 7c701c0 | 2016-05-26 11:20:13 -0600 | [diff] [blame] | 122 | |
Chris Forbes | f566aae | 2017-04-24 16:59:02 -0700 | [diff] [blame] | 123 | safe_VkDescriptorPoolCreateInfo createInfo; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 124 | std::unordered_set<cvdescriptorset::DescriptorSet *> sets; // Collection of all sets in this pool |
| 125 | std::vector<uint32_t> maxDescriptorTypeCount; // Max # of descriptors of each type in this pool |
| 126 | std::vector<uint32_t> availableDescriptorTypeCount; // Available # of descriptors of each type in this pool |
Tobin Ehlis | 7c701c0 | 2016-05-26 11:20:13 -0600 | [diff] [blame] | 127 | |
Tobin Ehlis | bd711bd | 2016-10-12 14:27:30 -0600 | [diff] [blame] | 128 | DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo) |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 129 | : pool(pool), |
| 130 | maxSets(pCreateInfo->maxSets), |
| 131 | availableSets(pCreateInfo->maxSets), |
Chris Forbes | f566aae | 2017-04-24 16:59:02 -0700 | [diff] [blame] | 132 | createInfo(pCreateInfo), |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 133 | maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0), |
| 134 | availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) { |
Chris Forbes | f566aae | 2017-04-24 16:59:02 -0700 | [diff] [blame] | 135 | // Collect maximums per descriptor type. |
| 136 | for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) { |
| 137 | uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type); |
| 138 | // Same descriptor types can appear several times |
| 139 | maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount; |
| 140 | availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex]; |
Tobin Ehlis | 7c701c0 | 2016-05-26 11:20:13 -0600 | [diff] [blame] | 141 | } |
| 142 | } |
Tobin Ehlis | 7c701c0 | 2016-05-26 11:20:13 -0600 | [diff] [blame] | 143 | }; |
| 144 | |
// Generic memory binding struct to track objects bound to objects:
// a (memory, offset, size) triple naming a region of a VkDeviceMemory allocation
struct MEM_BINDING {
    VkDeviceMemory mem;
    VkDeviceSize offset;
    VkDeviceSize size;
};
| 151 | |
// Bindings are equal only when memory object, offset, and size all match
inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }
| 153 | |
namespace std {
// Hash support so MEM_BINDING can live in unordered containers (e.g. a sparse-binding set)
template <>
struct hash<MEM_BINDING> {
    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
        // NOTE(review): the reinterpret_cast type-puns the handle to uint64_t; this assumes
        // VkDeviceMemory is 64 bits wide (pointer on 64-bit builds, uint64_t on 32-bit) -- confirm
        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
        return intermediate ^ hash<uint64_t>()(mb.size);
    }
};
}  // namespace std
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 163 | |
Tobin Ehlis | 530bb0a | 2016-11-16 08:57:22 -0700 | [diff] [blame] | 164 | // Superclass for bindable object state (currently images and buffers) |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 165 | class BINDABLE : public BASE_NODE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 166 | public: |
| 167 | bool sparse; // Is this object being bound with sparse memory or not? |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 168 | // Non-sparse binding data |
| 169 | MEM_BINDING binding; |
Tobin Ehlis | 530bb0a | 2016-11-16 08:57:22 -0700 | [diff] [blame] | 170 | // Memory requirements for this BINDABLE |
| 171 | VkMemoryRequirements requirements; |
Tobin Ehlis | 8c18970 | 2016-11-17 13:39:57 -0700 | [diff] [blame] | 172 | // bool to track if memory requirements were checked |
| 173 | bool memory_requirements_checked; |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 174 | // Sparse binding data, initially just tracking MEM_BINDING per mem object |
| 175 | // There's more data for sparse bindings so need better long-term solution |
| 176 | // TODO : Need to update solution to track all sparse binding data |
| 177 | std::unordered_set<MEM_BINDING> sparse_bindings; |
John Zulauf | 4c7b552 | 2017-12-15 14:35:06 -0700 | [diff] [blame] | 178 | |
| 179 | std::unordered_set<VkDeviceMemory> bound_memory_set_; |
| 180 | |
| 181 | BINDABLE() |
| 182 | : sparse(false), binding{}, requirements{}, memory_requirements_checked(false), sparse_bindings{}, bound_memory_set_{} {}; |
| 183 | |
| 184 | // Update the cached set of memory bindings. |
| 185 | // Code that changes binding.mem or sparse_bindings must call UpdateBoundMemorySet() |
| 186 | void UpdateBoundMemorySet() { |
| 187 | bound_memory_set_.clear(); |
Tobin Ehlis | 640a81c | 2016-11-15 15:37:18 -0700 | [diff] [blame] | 188 | if (!sparse) { |
John Zulauf | 4c7b552 | 2017-12-15 14:35:06 -0700 | [diff] [blame] | 189 | bound_memory_set_.insert(binding.mem); |
Tobin Ehlis | 640a81c | 2016-11-15 15:37:18 -0700 | [diff] [blame] | 190 | } else { |
| 191 | for (auto sb : sparse_bindings) { |
John Zulauf | 4c7b552 | 2017-12-15 14:35:06 -0700 | [diff] [blame] | 192 | bound_memory_set_.insert(sb.mem); |
Tobin Ehlis | 640a81c | 2016-11-15 15:37:18 -0700 | [diff] [blame] | 193 | } |
| 194 | } |
Tobin Ehlis | 640a81c | 2016-11-15 15:37:18 -0700 | [diff] [blame] | 195 | } |
John Zulauf | 4c7b552 | 2017-12-15 14:35:06 -0700 | [diff] [blame] | 196 | |
| 197 | // Return unordered set of memory objects that are bound |
| 198 | // Instead of creating a set from scratch each query, return the cached one |
| 199 | const std::unordered_set<VkDeviceMemory> &GetBoundMemory() const { return bound_memory_set_; } |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 200 | }; |
| 201 | |
Tobin Ehlis | 4668dce | 2016-11-16 09:30:23 -0700 | [diff] [blame] | 202 | class BUFFER_STATE : public BINDABLE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 203 | public: |
Tobin Ehlis | 08edee6 | 2016-06-23 17:03:56 -0600 | [diff] [blame] | 204 | VkBuffer buffer; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 205 | VkBufferCreateInfo createInfo; |
Tobin Ehlis | 4668dce | 2016-11-16 09:30:23 -0700 | [diff] [blame] | 206 | BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) { |
Tony Barbour | 00bafef | 2017-04-26 13:59:35 -0600 | [diff] [blame] | 207 | if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) { |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 208 | uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount]; |
| 209 | for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) { |
| 210 | pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i]; |
| 211 | } |
| 212 | createInfo.pQueueFamilyIndices = pQueueFamilyIndices; |
| 213 | } |
| 214 | |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 215 | if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) { |
| 216 | sparse = true; |
| 217 | } |
| 218 | }; |
Chris Forbes | da914b6 | 2016-09-22 18:51:58 +1200 | [diff] [blame] | 219 | |
Tobin Ehlis | 4668dce | 2016-11-16 09:30:23 -0700 | [diff] [blame] | 220 | BUFFER_STATE(BUFFER_STATE const &rh_obj) = delete; |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 221 | |
| 222 | ~BUFFER_STATE() { |
Tony Barbour | 00bafef | 2017-04-26 13:59:35 -0600 | [diff] [blame] | 223 | if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) { |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 224 | delete[] createInfo.pQueueFamilyIndices; |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 225 | createInfo.pQueueFamilyIndices = nullptr; |
| 226 | } |
| 227 | }; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 228 | }; |
| 229 | |
// State tracker for a VkBufferView: the handle plus a shallow copy of its create info
class BUFFER_VIEW_STATE : public BASE_NODE {
   public:
    VkBufferView buffer_view;
    VkBufferViewCreateInfo create_info;
    BUFFER_VIEW_STATE(VkBufferView bv, const VkBufferViewCreateInfo *ci) : buffer_view(bv), create_info(*ci){};
    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
};
| 237 | |
// State tracker for a VkSampler: the handle plus a copy of its create info
struct SAMPLER_STATE : public BASE_NODE {
    VkSampler sampler;
    VkSamplerCreateInfo createInfo;

    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci){};
};
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 244 | |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 245 | class IMAGE_STATE : public BINDABLE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 246 | public: |
Tobin Ehlis | 6b9c945 | 2016-06-28 14:52:11 -0600 | [diff] [blame] | 247 | VkImage image; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 248 | VkImageCreateInfo createInfo; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 249 | bool valid; // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO |
| 250 | bool acquired; // If this is a swapchain image, has it been acquired by the app. |
| 251 | bool shared_presentable; // True for a front-buffered swapchain image |
| 252 | bool layout_locked; // A front-buffered image that has been presented can never have layout transitioned |
| 253 | bool get_sparse_reqs_called; // Track if GetImageSparseMemoryRequirements() has been called for this image |
Tobin Ehlis | 7eee148 | 2018-02-08 11:19:10 -0700 | [diff] [blame] | 254 | bool sparse_metadata_required; // Track if sparse metadata aspect is required for this image |
| 255 | bool sparse_metadata_bound; // Track if sparse metadata aspect is bound to this image |
| 256 | std::vector<VkSparseImageMemoryRequirements> sparse_requirements; |
Tobin Ehlis | 30df15c | 2016-10-12 17:17:57 -0600 | [diff] [blame] | 257 | IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo) |
Tobin Ehlis | 7eee148 | 2018-02-08 11:19:10 -0700 | [diff] [blame] | 258 | : image(img), |
| 259 | createInfo(*pCreateInfo), |
| 260 | valid(false), |
| 261 | acquired(false), |
| 262 | shared_presentable(false), |
| 263 | layout_locked(false), |
| 264 | get_sparse_reqs_called(false), |
Tobin Ehlis | 880f4c9 | 2018-02-08 14:10:35 -0700 | [diff] [blame] | 265 | sparse_metadata_required(false), |
| 266 | sparse_metadata_bound(false), |
Tobin Ehlis | 7eee148 | 2018-02-08 11:19:10 -0700 | [diff] [blame] | 267 | sparse_requirements{} { |
Tony Barbour | 00bafef | 2017-04-26 13:59:35 -0600 | [diff] [blame] | 268 | if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) { |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 269 | uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount]; |
| 270 | for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) { |
| 271 | pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i]; |
| 272 | } |
| 273 | createInfo.pQueueFamilyIndices = pQueueFamilyIndices; |
| 274 | } |
| 275 | |
Tobin Ehlis | 5410827 | 2016-10-11 14:26:49 -0600 | [diff] [blame] | 276 | if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) { |
| 277 | sparse = true; |
| 278 | } |
| 279 | }; |
Chris Forbes | da914b6 | 2016-09-22 18:51:58 +1200 | [diff] [blame] | 280 | |
Tobin Ehlis | 30df15c | 2016-10-12 17:17:57 -0600 | [diff] [blame] | 281 | IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete; |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 282 | |
| 283 | ~IMAGE_STATE() { |
Tony Barbour | 00bafef | 2017-04-26 13:59:35 -0600 | [diff] [blame] | 284 | if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) { |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 285 | delete[] createInfo.pQueueFamilyIndices; |
Peter Lohrmann | 1f0ab14 | 2017-03-17 16:58:14 -0700 | [diff] [blame] | 286 | createInfo.pQueueFamilyIndices = nullptr; |
| 287 | } |
| 288 | }; |
Tobin Ehlis | 0a43bde | 2016-05-03 08:31:08 -0600 | [diff] [blame] | 289 | }; |
| 290 | |
// State tracker for a VkImageView: the handle plus a shallow copy of its create info
class IMAGE_VIEW_STATE : public BASE_NODE {
   public:
    VkImageView image_view;
    VkImageViewCreateInfo create_info;
    IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci) : image_view(iv), create_info(*ci){};
    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};
| 298 | |
// Simple offset + size pair describing a range within a memory allocation
struct MemRange {
    VkDeviceSize offset;
    VkDeviceSize size;
};
| 303 | |
// Describes one image's or buffer's binding range within a VkDeviceMemory allocation,
// plus the set of other ranges it aliases
struct MEMORY_RANGE {
    uint64_t handle;  // Handle of the image or buffer occupying this range
    bool image;       // True for image, false for buffer
    bool linear;      // True for buffers and linear images
    bool valid;       // True if this range is known to be valid
    VkDeviceMemory memory;
    VkDeviceSize start;
    VkDeviceSize size;
    VkDeviceSize end;  // Store this pre-computed for simplicity
    // Set of ptrs to every range aliased with this one
    std::unordered_set<MEMORY_RANGE *> aliases;
};
| 316 | |
// Data struct for tracking memory object
struct DEVICE_MEM_INFO : public BASE_NODE {
    void *object;       // Dispatchable object used to create this memory (device or swapchain)
    bool global_valid;  // If allocation is mapped or external, set to "true" to be picked up by subsequently bound ranges
    VkDeviceMemory mem;
    VkMemoryAllocateInfo alloc_info;
    // Dedicated-allocation tracking -- presumably when is_dedicated is set, one of
    // dedicated_buffer/dedicated_image names the resource this memory is dedicated to; confirm against caller
    bool is_dedicated;
    VkBuffer dedicated_buffer;
    VkImage dedicated_image;
    std::unordered_set<VK_OBJECT> obj_bindings;               // objects bound to this memory
    std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges;  // Map of object to its binding range
    // Convenience vectors image/buff handles to speed up iterating over images or buffers independently
    std::unordered_set<uint64_t> bound_images;
    std::unordered_set<uint64_t> bound_buffers;

    MemRange mem_range;        // NOTE(review): presumably the currently mapped offset/size -- verify against map/unmap handling
    void *shadow_copy_base;    // Base of layer's allocation for guard band, data, and alignment space
    void *shadow_copy;         // Pointer to start of guard-band data before mapped region
    uint64_t shadow_pad_size;  // Size of the guard-band data before and after actual data. It MUST be a
                               // multiple of limits.minMemoryMapAlignment
    void *p_driver_data;       // Pointer to application's actual memory

    DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
        : object(disp_object),
          global_valid(false),
          mem(in_mem),
          alloc_info(*p_alloc_info),
          is_dedicated(false),
          dedicated_buffer(VK_NULL_HANDLE),
          dedicated_image(VK_NULL_HANDLE),
          mem_range{},
          shadow_copy_base(0),
          shadow_copy(0),
          shadow_pad_size(0),
          p_driver_data(0){};
};
| 353 | |
// State tracker for a VkSwapchainKHR and its presentable images
class SWAPCHAIN_NODE {
   public:
    safe_VkSwapchainCreateInfoKHR createInfo;  // Deep copy of the swapchain create info
    VkSwapchainKHR swapchain;
    std::vector<VkImage> images;  // Swapchain images -- presumably populated when the app queries them; confirm against caller
    bool replaced = false;        // NOTE(review): presumably set when this swapchain is passed as oldSwapchain -- confirm
    bool shared_presentable = false;  // True for a shared-presentable-image swapchain
    CALL_STATE vkGetSwapchainImagesKHRState = UNCALLED;  // Progress of the two-call image query (see CALL_STATE)
    uint32_t get_swapchain_image_count = 0;              // Image count from the vkGetSwapchainImagesKHR count query
    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
        : createInfo(pCreateInfo), swapchain(swapchain) {}
};
Mark Lobodzinski | 0978f5f | 2016-05-19 17:23:38 -0600 | [diff] [blame] | 366 | |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 367 | class IMAGE_CMD_BUF_LAYOUT_NODE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 368 | public: |
Chris Forbes | da914b6 | 2016-09-22 18:51:58 +1200 | [diff] [blame] | 369 | IMAGE_CMD_BUF_LAYOUT_NODE() = default; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 370 | IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput) |
| 371 | : initialLayout(initialLayoutInput), layout(layoutInput) {} |
| 372 | |
| 373 | VkImageLayout initialLayout; |
| 374 | VkImageLayout layout; |
| 375 | }; |
| 376 | |
// Store the DAG.
// One node per render-pass subpass; prev/next hold indices of subpasses with
// dependencies into / out of this one (see RENDER_PASS_STATE::subpassToNode)
struct DAGNode {
    uint32_t pass;
    std::vector<uint32_t> prev;
    std::vector<uint32_t> next;
};
| 383 | |
// State tracker for a VkRenderPass: create info plus derived subpass-dependency data
struct RENDER_PASS_STATE : public BASE_NODE {
    VkRenderPass renderPass;  // NOTE(review): not set by the ctor -- presumably assigned by the caller after creation; confirm
    safe_VkRenderPassCreateInfo createInfo;  // Deep copy of the render pass create info
    std::vector<bool> hasSelfDependency;     // Per subpass: does it have a self-dependency?
    std::vector<DAGNode> subpassToNode;      // Subpass dependency DAG, one node per subpass
    std::vector<int32_t> subpass_to_dependency_index;  // srcSubpass to dependency index of self dep, or -1 if none
    std::unordered_map<uint32_t, bool> attachment_first_read;  // Per attachment: is its first use a read?

    RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) : createInfo(pCreateInfo) {}
};
Mark Lobodzinski | 0978f5f | 2016-05-19 17:23:38 -0600 | [diff] [blame] | 394 | |
// vkCmd tracking -- complete as of header 1.0.68
// please keep in "none, then sorted" order
// Note: grepping vulkan.h for VKAPI_CALL.*vkCmd will return all functions except vkEndCommandBuffer
// (enumerator values are implicit; only CMD_NONE == 0 is relied upon by position)
enum CMD_TYPE {
    CMD_NONE,
    CMD_BEGINQUERY,
    CMD_BEGINRENDERPASS,
    CMD_BINDDESCRIPTORSETS,
    CMD_BINDINDEXBUFFER,
    CMD_BINDPIPELINE,
    CMD_BINDVERTEXBUFFERS,
    CMD_BLITIMAGE,
    CMD_CLEARATTACHMENTS,
    CMD_CLEARCOLORIMAGE,
    CMD_CLEARDEPTHSTENCILIMAGE,
    CMD_COPYBUFFER,
    CMD_COPYBUFFERTOIMAGE,
    CMD_COPYIMAGE,
    CMD_COPYIMAGETOBUFFER,
    CMD_COPYQUERYPOOLRESULTS,
    CMD_DEBUGMARKERBEGINEXT,
    CMD_DEBUGMARKERENDEXT,
    CMD_DEBUGMARKERINSERTEXT,
    CMD_DISPATCH,
    CMD_DISPATCHBASEKHX,
    CMD_DISPATCHINDIRECT,
    CMD_DRAW,
    CMD_DRAWINDEXED,
    CMD_DRAWINDEXEDINDIRECT,
    CMD_DRAWINDEXEDINDIRECTCOUNTAMD,
    CMD_DRAWINDIRECT,
    CMD_DRAWINDIRECTCOUNTAMD,
    CMD_ENDCOMMANDBUFFER,  // Should be the last command in any RECORDED cmd buffer
    CMD_ENDQUERY,
    CMD_ENDRENDERPASS,
    CMD_EXECUTECOMMANDS,
    CMD_FILLBUFFER,
    CMD_NEXTSUBPASS,
    CMD_PIPELINEBARRIER,
    CMD_PROCESSCOMMANDSNVX,
    CMD_PUSHCONSTANTS,
    CMD_PUSHDESCRIPTORSETKHR,
    CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR,
    CMD_RESERVESPACEFORCOMMANDSNVX,
    CMD_RESETEVENT,
    CMD_RESETQUERYPOOL,
    CMD_RESOLVEIMAGE,
    CMD_SETBLENDCONSTANTS,
    CMD_SETDEPTHBIAS,
    CMD_SETDEPTHBOUNDS,
    CMD_SETDEVICEMASKKHX,
    CMD_SETDISCARDRECTANGLEEXT,
    CMD_SETEVENT,
    CMD_SETLINEWIDTH,
    CMD_SETSAMPLELOCATIONSEXT,
    CMD_SETSCISSOR,
    CMD_SETSTENCILCOMPAREMASK,
    CMD_SETSTENCILREFERENCE,
    CMD_SETSTENCILWRITEMASK,
    CMD_SETVIEWPORT,
    CMD_SETVIEWPORTWSCALINGNV,
    CMD_UPDATEBUFFER,
    CMD_WAITEVENTS,
    CMD_WRITETIMESTAMP,
};
| 461 | |
Mark Lobodzinski | 0978f5f | 2016-05-19 17:23:38 -0600 | [diff] [blame] | 462 | enum CB_STATE { |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 463 | CB_NEW, // Newly created CB w/o any cmds |
| 464 | CB_RECORDING, // BeginCB has been called on this CB |
| 465 | CB_RECORDED, // EndCB has been called on this CB |
| 466 | CB_INVALID_COMPLETE, // had a complete recording, but was since invalidated |
| 467 | CB_INVALID_INCOMPLETE, // fouled before recording was completed |
Mark Lobodzinski | 0978f5f | 2016-05-19 17:23:38 -0600 | [diff] [blame] | 468 | }; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 469 | |
// CB Status -- used to track status of various bindings on cmd buffer objects
// Bits are ORed into GLOBAL_CB_NODE::status/static_status as state is set by the
// pipeline (static) or by vkCmdSet* dynamic-state calls.
typedef VkFlags CBStatusFlags;
enum CBStatusFlagBits {
    // clang-format off
    CBSTATUS_NONE                   = 0x00000000,   // No status is set
    CBSTATUS_LINE_WIDTH_SET         = 0x00000001,   // Line width has been set
    CBSTATUS_DEPTH_BIAS_SET         = 0x00000002,   // Depth bias has been set
    CBSTATUS_BLEND_CONSTANTS_SET    = 0x00000004,   // Blend constants state has been set
    CBSTATUS_DEPTH_BOUNDS_SET       = 0x00000008,   // Depth bounds state object has been set
    CBSTATUS_STENCIL_READ_MASK_SET  = 0x00000010,   // Stencil read mask has been set
    CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000020,   // Stencil write mask has been set
    CBSTATUS_STENCIL_REFERENCE_SET  = 0x00000040,   // Stencil reference has been set
    CBSTATUS_VIEWPORT_SET           = 0x00000080,   // Viewport state has been set
    CBSTATUS_SCISSOR_SET            = 0x00000100,   // Scissor state has been set
    CBSTATUS_INDEX_BUFFER_BOUND     = 0x00000200,   // Index buffer has been set
    CBSTATUS_ALL_STATE_SET          = 0x000001FF,   // All state set (intentionally exclude index buffer)
    // clang-format on
};
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 488 | |
Mark Lobodzinski | 6dc35f9 | 2017-03-07 16:37:16 -0700 | [diff] [blame] | 489 | struct TEMPLATE_STATE { |
| 490 | VkDescriptorUpdateTemplateKHR desc_update_template; |
Mark Lobodzinski | ef4da50 | 2017-09-28 15:18:18 -0600 | [diff] [blame] | 491 | safe_VkDescriptorUpdateTemplateCreateInfo create_info; |
Mark Lobodzinski | 6dc35f9 | 2017-03-07 16:37:16 -0700 | [diff] [blame] | 492 | |
Mark Lobodzinski | ef4da50 | 2017-09-28 15:18:18 -0600 | [diff] [blame] | 493 | TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo) |
Mark Lobodzinski | 6dc35f9 | 2017-03-07 16:37:16 -0700 | [diff] [blame] | 494 | : desc_update_template(update_template), create_info(*pCreateInfo) {} |
| 495 | }; |
| 496 | |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 497 | struct QueryObject { |
| 498 | VkQueryPool pool; |
| 499 | uint32_t index; |
| 500 | }; |
| 501 | |
| 502 | inline bool operator==(const QueryObject &query1, const QueryObject &query2) { |
| 503 | return (query1.pool == query2.pool && query1.index == query2.index); |
| 504 | } |
| 505 | |
| 506 | namespace std { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 507 | template <> |
| 508 | struct hash<QueryObject> { |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 509 | size_t operator()(QueryObject query) const throw() { |
| 510 | return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index); |
| 511 | } |
| 512 | }; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 513 | } // namespace std |
Mark Lobodzinski | 729a8d3 | 2017-01-26 12:16:30 -0700 | [diff] [blame] | 514 | struct DRAW_DATA { |
| 515 | std::vector<VkBuffer> buffers; |
| 516 | }; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 517 | |
// Keys an image, optionally narrowed to a single subresource. When hasSubresource is
// false the pair refers to the image as a whole and 'subresource' is ignored (see
// operator== and the std::hash specialization below).
struct ImageSubresourcePair {
    VkImage image;
    bool hasSubresource;             // true if 'subresource' below is meaningful
    VkImageSubresource subresource;  // only valid when hasSubresource is true
};
| 523 | |
| 524 | inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 525 | if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource) return false; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 526 | return !img1.hasSubresource || |
| 527 | (img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel && |
| 528 | img1.subresource.arrayLayer == img2.subresource.arrayLayer); |
| 529 | } |
| 530 | |
| 531 | namespace std { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 532 | template <> |
| 533 | struct hash<ImageSubresourcePair> { |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 534 | size_t operator()(ImageSubresourcePair img) const throw() { |
| 535 | size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image)); |
| 536 | hashVal ^= hash<bool>()(img.hasSubresource); |
| 537 | if (img.hasSubresource) { |
| 538 | hashVal ^= hash<uint32_t>()(reinterpret_cast<uint32_t &>(img.subresource.aspectMask)); |
| 539 | hashVal ^= hash<uint32_t>()(img.subresource.mipLevel); |
| 540 | hashVal ^= hash<uint32_t>()(img.subresource.arrayLayer); |
| 541 | } |
| 542 | return hashVal; |
| 543 | } |
| 544 | }; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 545 | } // namespace std |
Tobin Ehlis | ad4a2da | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 546 | |
John Zulauf | 34ebf27 | 2018-02-16 13:08:47 -0700 | [diff] [blame] | 547 | // Canonical dictionary for PushConstantRanges |
| 548 | using PushConstantRangesDict = hash_util::Dictionary<PushConstantRanges>; |
| 549 | using PushConstantRangesId = PushConstantRangesDict::Id; |
| 550 | |
| 551 | // Canonical dictionary for the pipeline layout's layout of descriptorsetlayouts |
| 552 | using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef; |
| 553 | using DescriptorSetLayoutId = std::shared_ptr<const DescriptorSetLayoutDef>; |
John Zulauf | df3c5c1 | 2018-03-06 16:44:43 -0700 | [diff] [blame] | 554 | using PipelineLayoutSetLayoutsDef = std::vector<DescriptorSetLayoutId>; |
| 555 | using PipelineLayoutSetLayoutsDict = |
| 556 | hash_util::Dictionary<PipelineLayoutSetLayoutsDef, hash_util::IsOrderedContainer<PipelineLayoutSetLayoutsDef>>; |
| 557 | using PipelineLayoutSetLayoutsId = PipelineLayoutSetLayoutsDict::Id; |
John Zulauf | 34ebf27 | 2018-02-16 13:08:47 -0700 | [diff] [blame] | 558 | |
// Defines/stores a compatibility definition for set N
// The "layout layout" must store at least set+1 entries, but only the first set+1 are considered for hash and equality testing
// Note: the "canonical" data are referenced by Id, not including handle or device specific state
// Note: hash and equality only consider layout_id entries [0, set] for determining uniqueness
struct PipelineLayoutCompatDef {
    uint32_t set;                               // Set number this compatibility record describes
    PushConstantRangesId push_constant_ranges;  // Canonical id for the layout's push constant ranges
    PipelineLayoutSetLayoutsId set_layouts_id;  // Canonical id for the layout's ordered set layouts
    PipelineLayoutCompatDef(const uint32_t set_index, const PushConstantRangesId pcr_id, const PipelineLayoutSetLayoutsId sl_id)
        : set(set_index), push_constant_ranges(pcr_id), set_layouts_id(sl_id) {}
    size_t hash() const;  // defined out-of-line; used by the HasHashMember dictionary below
    bool operator==(const PipelineLayoutCompatDef &other) const;
};
| 572 | |
// Canonical dictionary for PipelineLayoutCompat records
// (hashing delegates to PipelineLayoutCompatDef::hash() via HasHashMember)
using PipelineLayoutCompatDict = hash_util::Dictionary<PipelineLayoutCompatDef, hash_util::HasHashMember<PipelineLayoutCompatDef>>;
using PipelineLayoutCompatId = PipelineLayoutCompatDict::Id;
John Zulauf | f0d0639 | 2018-02-16 13:07:24 -0700 | [diff] [blame] | 576 | |
Tobin Ehlis | 0fc8567 | 2016-07-07 11:06:26 -0600 | [diff] [blame] | 577 | // Store layouts and pushconstants for PipelineLayout |
| 578 | struct PIPELINE_LAYOUT_NODE { |
| 579 | VkPipelineLayout layout; |
Tobin Ehlis | a8e46e7 | 2017-06-21 10:16:10 -0600 | [diff] [blame] | 580 | std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts; |
John Zulauf | f0d0639 | 2018-02-16 13:07:24 -0700 | [diff] [blame] | 581 | PushConstantRangesId push_constant_ranges; |
John Zulauf | 34ebf27 | 2018-02-16 13:08:47 -0700 | [diff] [blame] | 582 | std::vector<PipelineLayoutCompatId> compat_for_set; |
Tobin Ehlis | 0fc8567 | 2016-07-07 11:06:26 -0600 | [diff] [blame] | 583 | |
John Zulauf | 34ebf27 | 2018-02-16 13:08:47 -0700 | [diff] [blame] | 584 | PIPELINE_LAYOUT_NODE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {} |
Tobin Ehlis | 0fc8567 | 2016-07-07 11:06:26 -0600 | [diff] [blame] | 585 | |
| 586 | void reset() { |
| 587 | layout = VK_NULL_HANDLE; |
| 588 | set_layouts.clear(); |
John Zulauf | f0d0639 | 2018-02-16 13:07:24 -0700 | [diff] [blame] | 589 | push_constant_ranges.reset(); |
John Zulauf | 34ebf27 | 2018-02-16 13:08:47 -0700 | [diff] [blame] | 590 | compat_for_set.clear(); |
Tobin Ehlis | 0fc8567 | 2016-07-07 11:06:26 -0600 | [diff] [blame] | 591 | } |
| 592 | }; |
| 593 | |
Tobin Ehlis | 52c76a3 | 2016-10-12 09:05:51 -0600 | [diff] [blame] | 594 | class PIPELINE_STATE : public BASE_NODE { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 595 | public: |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 596 | VkPipeline pipeline; |
| 597 | safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI; |
Tobin Ehlis | 7f316d0 | 2017-09-18 08:38:37 -0600 | [diff] [blame] | 598 | // Hold shared ptr to RP in case RP itself is destroyed |
| 599 | std::shared_ptr<RENDER_PASS_STATE> rp_state; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 600 | safe_VkComputePipelineCreateInfo computePipelineCI; |
| 601 | // Flag of which shader stages are active for this pipeline |
| 602 | uint32_t active_shaders; |
| 603 | uint32_t duplicate_shaders; |
| 604 | // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline |
Tobin Ehlis | cebc4c0 | 2016-08-22 10:10:43 -0600 | [diff] [blame] | 605 | std::unordered_map<uint32_t, std::map<uint32_t, descriptor_req>> active_slots; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 606 | // Vtx input info (if any) |
| 607 | std::vector<VkVertexInputBindingDescription> vertexBindingDescriptions; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 608 | std::vector<VkPipelineColorBlendAttachmentState> attachments; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 609 | bool blendConstantsEnabled; // Blend constants enabled for any attachments |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 610 | PIPELINE_LAYOUT_NODE pipeline_layout; |
Chris Forbes | 0771b67 | 2018-03-22 21:13:46 -0700 | [diff] [blame^] | 611 | VkPrimitiveTopology topology_at_rasterizer; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 612 | |
| 613 | // Default constructor |
Tobin Ehlis | 52c76a3 | 2016-10-12 09:05:51 -0600 | [diff] [blame] | 614 | PIPELINE_STATE() |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 615 | : pipeline{}, |
| 616 | graphicsPipelineCI{}, |
Tobin Ehlis | 7f316d0 | 2017-09-18 08:38:37 -0600 | [diff] [blame] | 617 | rp_state(nullptr), |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 618 | computePipelineCI{}, |
| 619 | active_shaders(0), |
| 620 | duplicate_shaders(0), |
| 621 | active_slots(), |
| 622 | vertexBindingDescriptions(), |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 623 | attachments(), |
| 624 | blendConstantsEnabled(false), |
Chris Forbes | 0771b67 | 2018-03-22 21:13:46 -0700 | [diff] [blame^] | 625 | pipeline_layout(), |
| 626 | topology_at_rasterizer{} {} |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 627 | |
Tobin Ehlis | 7f316d0 | 2017-09-18 08:38:37 -0600 | [diff] [blame] | 628 | void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate) { |
Petr Kraus | e91f7a1 | 2017-12-14 20:57:36 +0100 | [diff] [blame] | 629 | bool uses_color_attachment = false; |
| 630 | bool uses_depthstencil_attachment = false; |
| 631 | if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) { |
| 632 | const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass]; |
| 633 | |
| 634 | for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) { |
| 635 | if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) { |
| 636 | uses_color_attachment = true; |
| 637 | break; |
| 638 | } |
| 639 | } |
| 640 | |
| 641 | if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) { |
| 642 | uses_depthstencil_attachment = true; |
| 643 | } |
| 644 | } |
| 645 | graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment); |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 646 | // Make sure compute pipeline is null |
| 647 | VkComputePipelineCreateInfo emptyComputeCI = {}; |
| 648 | computePipelineCI.initialize(&emptyComputeCI); |
| 649 | for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) { |
| 650 | const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i]; |
| 651 | this->duplicate_shaders |= this->active_shaders & pPSSCI->stage; |
| 652 | this->active_shaders |= pPSSCI->stage; |
| 653 | } |
Petr Kraus | e91f7a1 | 2017-12-14 20:57:36 +0100 | [diff] [blame] | 654 | if (graphicsPipelineCI.pVertexInputState) { |
| 655 | const auto pVICI = graphicsPipelineCI.pVertexInputState; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 656 | if (pVICI->vertexBindingDescriptionCount) { |
| 657 | this->vertexBindingDescriptions = std::vector<VkVertexInputBindingDescription>( |
| 658 | pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount); |
| 659 | } |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 660 | } |
Petr Kraus | e91f7a1 | 2017-12-14 20:57:36 +0100 | [diff] [blame] | 661 | if (graphicsPipelineCI.pColorBlendState) { |
| 662 | const auto pCBCI = graphicsPipelineCI.pColorBlendState; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 663 | if (pCBCI->attachmentCount) { |
| 664 | this->attachments = std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments, |
| 665 | pCBCI->pAttachments + pCBCI->attachmentCount); |
| 666 | } |
| 667 | } |
Chris Forbes | 0771b67 | 2018-03-22 21:13:46 -0700 | [diff] [blame^] | 668 | if (graphicsPipelineCI.pInputAssemblyState) { |
| 669 | topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology; |
| 670 | } |
Tobin Ehlis | 7f316d0 | 2017-09-18 08:38:37 -0600 | [diff] [blame] | 671 | rp_state = rpstate; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 672 | } |
Chris Forbes | ebdd997 | 2017-08-15 11:09:13 -0700 | [diff] [blame] | 673 | |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 674 | void initComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo) { |
| 675 | computePipelineCI.initialize(pCreateInfo); |
| 676 | // Make sure gfx pipeline is null |
| 677 | VkGraphicsPipelineCreateInfo emptyGraphicsCI = {}; |
Petr Kraus | e91f7a1 | 2017-12-14 20:57:36 +0100 | [diff] [blame] | 678 | graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false); |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 679 | switch (computePipelineCI.stage.stage) { |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 680 | case VK_SHADER_STAGE_COMPUTE_BIT: |
| 681 | this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT; |
| 682 | break; |
| 683 | default: |
| 684 | // TODO : Flag error |
| 685 | break; |
Tobin Ehlis | eb00b0d | 2016-08-17 07:55:55 -0600 | [diff] [blame] | 686 | } |
| 687 | } |
| 688 | }; |
| 689 | |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 690 | // Track last states that are bound per pipeline bind point (Gfx & Compute) |
| 691 | struct LAST_BOUND_STATE { |
Tobin Ehlis | 52c76a3 | 2016-10-12 09:05:51 -0600 | [diff] [blame] | 692 | PIPELINE_STATE *pipeline_state; |
John Zulauf | aee1d53 | 2018-02-16 13:09:39 -0700 | [diff] [blame] | 693 | VkPipelineLayout pipeline_layout; |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 694 | // Track each set that has been bound |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 695 | // Ordered bound set tracking where index is set# that given set is bound to |
| 696 | std::vector<cvdescriptorset::DescriptorSet *> boundDescriptorSets; |
Józef Kucia | f0c94d4 | 2017-10-25 22:15:22 +0200 | [diff] [blame] | 697 | std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set; |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 698 | // one dynamic offset per dynamic descriptor bound to this CB |
| 699 | std::vector<std::vector<uint32_t>> dynamicOffsets; |
John Zulauf | aee1d53 | 2018-02-16 13:09:39 -0700 | [diff] [blame] | 700 | std::vector<PipelineLayoutCompatId> compat_id_for_set; |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 701 | |
| 702 | void reset() { |
Tobin Ehlis | 52c76a3 | 2016-10-12 09:05:51 -0600 | [diff] [blame] | 703 | pipeline_state = nullptr; |
John Zulauf | aee1d53 | 2018-02-16 13:09:39 -0700 | [diff] [blame] | 704 | pipeline_layout = VK_NULL_HANDLE; |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 705 | boundDescriptorSets.clear(); |
Józef Kucia | f0c94d4 | 2017-10-25 22:15:22 +0200 | [diff] [blame] | 706 | push_descriptor_set = nullptr; |
Tobin Ehlis | 09d1671 | 2016-05-17 10:41:55 -0600 | [diff] [blame] | 707 | dynamicOffsets.clear(); |
| 708 | } |
| 709 | }; |
| 710 | // Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 711 | struct GLOBAL_CB_NODE : public BASE_NODE { |
| 712 | VkCommandBuffer commandBuffer; |
Chris Forbes | b2b4348 | 2017-06-06 16:05:26 -0700 | [diff] [blame] | 713 | VkCommandBufferAllocateInfo createInfo = {}; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 714 | VkCommandBufferBeginInfo beginInfo; |
| 715 | VkCommandBufferInheritanceInfo inheritanceInfo; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 716 | VkDevice device; // device this CB belongs to |
Chris Forbes | 05375e7 | 2017-04-21 13:15:15 -0700 | [diff] [blame] | 717 | bool hasDrawCmd; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 718 | CB_STATE state; // Track cmd buffer update state |
| 719 | uint64_t submitCount; // Number of times CB has been submitted |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 720 | typedef uint64_t ImageLayoutUpdateCount; |
| 721 | ImageLayoutUpdateCount image_layout_change_count; // The sequence number for changes to image layout (for cached validation) |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 722 | CBStatusFlags status; // Track status of various bindings on cmd buffer |
| 723 | CBStatusFlags static_status; // All state bits provided by current graphics pipeline |
| 724 | // rather than dynamic state |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 725 | // Currently storing "lastBound" objects on per-CB basis |
| 726 | // long-term may want to create caches of "lastBound" states and could have |
| 727 | // each individual CMD_NODE referencing its own "lastBound" state |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 728 | // Store last bound state for Gfx & Compute pipeline bind points |
| 729 | LAST_BOUND_STATE lastBound[VK_PIPELINE_BIND_POINT_RANGE_SIZE]; |
| 730 | |
Chris Forbes | 5fc7783 | 2016-07-28 14:15:38 +1200 | [diff] [blame] | 731 | uint32_t viewportMask; |
| 732 | uint32_t scissorMask; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 733 | VkRenderPassBeginInfo activeRenderPassBeginInfo; |
Tobin Ehlis | 95ccf3e | 2016-10-12 15:24:03 -0600 | [diff] [blame] | 734 | RENDER_PASS_STATE *activeRenderPass; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 735 | VkSubpassContents activeSubpassContents; |
| 736 | uint32_t activeSubpass; |
| 737 | VkFramebuffer activeFramebuffer; |
| 738 | std::unordered_set<VkFramebuffer> framebuffers; |
Tobin Ehlis | 96f1d60 | 2016-07-08 12:33:45 -0600 | [diff] [blame] | 739 | // Unified data structs to track objects bound to this command buffer as well as object |
| 740 | // dependencies that have been broken : either destroyed objects, or updated descriptor sets |
| 741 | std::unordered_set<VK_OBJECT> object_bindings; |
Tobin Ehlis | 2556f5b | 2016-06-24 17:22:16 -0600 | [diff] [blame] | 742 | std::vector<VK_OBJECT> broken_bindings; |
Tobin Ehlis | 96f1d60 | 2016-07-08 12:33:45 -0600 | [diff] [blame] | 743 | |
Michael Lentine | 860b0fe | 2016-05-20 10:14:00 -0500 | [diff] [blame] | 744 | std::unordered_set<VkEvent> waitedEvents; |
| 745 | std::vector<VkEvent> writeEventsBeforeWait; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 746 | std::vector<VkEvent> events; |
Michael Lentine | 860b0fe | 2016-05-20 10:14:00 -0500 | [diff] [blame] | 747 | std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 748 | std::unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 749 | std::unordered_set<QueryObject> activeQueries; |
| 750 | std::unordered_set<QueryObject> startedQueries; |
| 751 | std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 752 | std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap; |
| 753 | std::vector<DRAW_DATA> drawData; |
| 754 | DRAW_DATA currentDrawData; |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 755 | bool vertex_buffer_used; // Track for perf warning to make sure any bound vtx buffer used |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 756 | VkCommandBuffer primaryCommandBuffer; |
| 757 | // Track images and buffers that are updated by this CB at the point of a draw |
| 758 | std::unordered_set<VkImageView> updateImages; |
| 759 | std::unordered_set<VkBuffer> updateBuffers; |
Chris Forbes | 390578a | 2017-05-18 15:52:12 -0700 | [diff] [blame] | 760 | // If primary, the secondary command buffers we will call. |
| 761 | // If secondary, the primary command buffers we will be called by. |
| 762 | std::unordered_set<GLOBAL_CB_NODE *> linkedCommandBuffers; |
Tobin Ehlis | a17a529 | 2017-07-28 12:11:30 -0600 | [diff] [blame] | 763 | // Validation functions run at primary CB queue submit time |
| 764 | std::vector<std::function<bool()>> queue_submit_functions; |
Tobin Ehlis | 2d44ca7 | 2017-07-27 11:08:00 -0600 | [diff] [blame] | 765 | // Validation functions run when secondary CB is executed in primary |
Tobin Ehlis | 37ec75a | 2018-03-12 11:26:39 -0600 | [diff] [blame] | 766 | std::vector<std::function<bool(GLOBAL_CB_NODE *, VkFramebuffer)>> cmd_execute_commands_functions; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 767 | std::unordered_set<VkDeviceMemory> memObjs; |
| 768 | std::vector<std::function<bool(VkQueue)>> eventUpdates; |
Michael Lentine | 5627e69 | 2016-05-20 17:45:02 -0500 | [diff] [blame] | 769 | std::vector<std::function<bool(VkQueue)>> queryUpdates; |
John Zulauf | 48a6a70 | 2017-12-22 17:14:54 -0700 | [diff] [blame] | 770 | std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets; |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 771 | }; |
Chris Forbes | e2b0ade | 2016-06-09 09:40:55 +1200 | [diff] [blame] | 772 | |
Chris Forbes | 8320a8d | 2016-08-01 15:15:30 +1200 | [diff] [blame] | 773 | struct SEMAPHORE_WAIT { |
| 774 | VkSemaphore semaphore; |
| 775 | VkQueue queue; |
| 776 | uint64_t seq; |
| 777 | }; |
| 778 | |
Chris Forbes | 226c1de | 2016-06-10 12:43:13 +1200 | [diff] [blame] | 779 | struct CB_SUBMISSION { |
Mark Lobodzinski | 729a8d3 | 2017-01-26 12:16:30 -0700 | [diff] [blame] | 780 | CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores, |
Mike Schuchardt | 7788236 | 2017-07-07 08:31:59 -0600 | [diff] [blame] | 781 | std::vector<VkSemaphore> const &signalSemaphores, std::vector<VkSemaphore> const &externalSemaphores, |
| 782 | VkFence fence) |
| 783 | : cbs(cbs), |
| 784 | waitSemaphores(waitSemaphores), |
| 785 | signalSemaphores(signalSemaphores), |
| 786 | externalSemaphores(externalSemaphores), |
| 787 | fence(fence) {} |
Chris Forbes | 226c1de | 2016-06-10 12:43:13 +1200 | [diff] [blame] | 788 | |
Chris Forbes | c7d3c78 | 2016-06-22 11:57:17 +1200 | [diff] [blame] | 789 | std::vector<VkCommandBuffer> cbs; |
Chris Forbes | 8320a8d | 2016-08-01 15:15:30 +1200 | [diff] [blame] | 790 | std::vector<SEMAPHORE_WAIT> waitSemaphores; |
| 791 | std::vector<VkSemaphore> signalSemaphores; |
Mike Schuchardt | 7788236 | 2017-07-07 08:31:59 -0600 | [diff] [blame] | 792 | std::vector<VkSemaphore> externalSemaphores; |
Chris Forbes | 8320a8d | 2016-08-01 15:15:30 +1200 | [diff] [blame] | 793 | VkFence fence; |
Chris Forbes | 226c1de | 2016-06-10 12:43:13 +1200 | [diff] [blame] | 794 | }; |
| 795 | |
Mark Lobodzinski | c7daa8f | 2017-01-17 09:14:36 -0700 | [diff] [blame] | 796 | struct IMAGE_LAYOUT_NODE { |
| 797 | VkImageLayout layout; |
| 798 | VkFormat format; |
| 799 | }; |
| 800 | |
Mark Lobodzinski | 9ef5d56 | 2017-01-27 12:28:30 -0700 | [diff] [blame] | 801 | // CHECK_DISABLED struct is a container for bools that can block validation checks from being performed. |
| 802 | // The end goal is to have all checks guarded by a bool. The bools are all "false" by default meaning that all checks |
| 803 | // are enabled. At CreateInstance time, the user can use the VK_EXT_validation_flags extension to pass in enum values |
| 804 | // of VkValidationCheckEXT that will selectively disable checks. |
| 805 | struct CHECK_DISABLED { |
| 806 | bool command_buffer_state; |
| 807 | bool create_descriptor_set_layout; |
| 808 | bool destroy_buffer_view; // Skip validation at DestroyBufferView time |
| 809 | bool destroy_image_view; // Skip validation at DestroyImageView time |
| 810 | bool destroy_pipeline; // Skip validation at DestroyPipeline time |
| 811 | bool destroy_descriptor_pool; // Skip validation at DestroyDescriptorPool time |
| 812 | bool destroy_framebuffer; // Skip validation at DestroyFramebuffer time |
| 813 | bool destroy_renderpass; // Skip validation at DestroyRenderpass time |
| 814 | bool destroy_image; // Skip validation at DestroyImage time |
| 815 | bool destroy_sampler; // Skip validation at DestroySampler time |
| 816 | bool destroy_command_pool; // Skip validation at DestroyCommandPool time |
| 817 | bool destroy_event; // Skip validation at DestroyEvent time |
| 818 | bool free_memory; // Skip validation at FreeMemory time |
| 819 | bool object_in_use; // Skip all object in_use checking |
| 820 | bool idle_descriptor_set; // Skip check to verify that descriptor set is no in-use |
| 821 | bool push_constant_range; // Skip push constant range checks |
| 822 | bool free_descriptor_sets; // Skip validation prior to vkFreeDescriptorSets() |
| 823 | bool allocate_descriptor_sets; // Skip validation prior to vkAllocateDescriptorSets() |
| 824 | bool update_descriptor_sets; // Skip validation prior to vkUpdateDescriptorSets() |
| 825 | bool wait_for_fences; |
| 826 | bool get_fence_state; |
| 827 | bool queue_wait_idle; |
| 828 | bool device_wait_idle; |
| 829 | bool destroy_fence; |
| 830 | bool destroy_semaphore; |
| 831 | bool destroy_query_pool; |
| 832 | bool get_query_pool_results; |
| 833 | bool destroy_buffer; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 834 | bool shader_validation; // Skip validation for shaders |
Tobin Ehlis | f320b19 | 2017-03-14 11:22:50 -0600 | [diff] [blame] | 835 | |
| 836 | void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); } |
Mark Lobodzinski | 9ef5d56 | 2017-01-27 12:28:30 -0700 | [diff] [blame] | 837 | }; |
| 838 | |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 839 | struct MT_FB_ATTACHMENT_INFO { |
| 840 | IMAGE_VIEW_STATE *view_state; |
| 841 | VkImage image; |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 842 | }; |
| 843 | |
| 844 | class FRAMEBUFFER_STATE : public BASE_NODE { |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 845 | public: |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 846 | VkFramebuffer framebuffer; |
| 847 | safe_VkFramebufferCreateInfo createInfo; |
Tobin Ehlis | be9e5d7 | 2017-09-07 14:24:36 -0600 | [diff] [blame] | 848 | std::shared_ptr<RENDER_PASS_STATE> rp_state; |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 849 | std::vector<MT_FB_ATTACHMENT_INFO> attachments; |
Tobin Ehlis | be9e5d7 | 2017-09-07 14:24:36 -0600 | [diff] [blame] | 850 | FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate) |
Tobin Ehlis | bb2039f | 2017-09-06 10:46:41 -0600 | [diff] [blame] | 851 | : framebuffer(fb), createInfo(pCreateInfo), rp_state(rpstate){}; |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 852 | }; |
| 853 | |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 854 | struct shader_module; |
| 855 | struct DeviceExtensions; |
| 856 | |
Tobin Ehlis | f0606de | 2016-07-20 13:27:33 -0600 | [diff] [blame] | 857 | // Fwd declarations of layer_data and helpers to look-up/validate state from layer_data maps |
Tobin Ehlis | 94bc5d2 | 2016-06-02 07:46:52 -0600 | [diff] [blame] | 858 | namespace core_validation { |
| 859 | struct layer_data; |
Tobin Ehlis | b2e1e2c | 2017-02-08 09:16:32 -0700 | [diff] [blame] | 860 | cvdescriptorset::DescriptorSet *GetSetNode(const layer_data *, VkDescriptorSet); |
Tobin Ehlis | a8e46e7 | 2017-06-21 10:16:10 -0600 | [diff] [blame] | 861 | std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout); |
Tobin Ehlis | b2e1e2c | 2017-02-08 09:16:32 -0700 | [diff] [blame] | 862 | DESCRIPTOR_POOL_STATE *GetDescriptorPoolState(const layer_data *, const VkDescriptorPool); |
| 863 | BUFFER_STATE *GetBufferState(const layer_data *, VkBuffer); |
| 864 | IMAGE_STATE *GetImageState(const layer_data *, VkImage); |
| 865 | DEVICE_MEM_INFO *GetMemObjInfo(const layer_data *, VkDeviceMemory); |
| 866 | BUFFER_VIEW_STATE *GetBufferViewState(const layer_data *, VkBufferView); |
| 867 | SAMPLER_STATE *GetSamplerState(const layer_data *, VkSampler); |
| 868 | IMAGE_VIEW_STATE *GetImageViewState(const layer_data *, VkImageView); |
Tobin Ehlis | b2e1e2c | 2017-02-08 09:16:32 -0700 | [diff] [blame] | 869 | SWAPCHAIN_NODE *GetSwapchainNode(const layer_data *, VkSwapchainKHR); |
| 870 | GLOBAL_CB_NODE *GetCBNode(layer_data const *my_data, const VkCommandBuffer cb); |
Tobin Ehlis | 8ca7394 | 2017-09-07 09:16:49 -0600 | [diff] [blame] | 871 | RENDER_PASS_STATE *GetRenderPassState(layer_data const *dev_data, VkRenderPass renderpass); |
Tobin Ehlis | be9e5d7 | 2017-09-07 14:24:36 -0600 | [diff] [blame] | 872 | std::shared_ptr<RENDER_PASS_STATE> GetRenderPassStateSharedPtr(layer_data const *dev_data, VkRenderPass renderpass); |
Tobin Ehlis | b2e1e2c | 2017-02-08 09:16:32 -0700 | [diff] [blame] | 873 | FRAMEBUFFER_STATE *GetFramebufferState(const layer_data *my_data, VkFramebuffer framebuffer); |
Mark Lobodzinski | ab9be28 | 2017-02-09 12:01:27 -0700 | [diff] [blame] | 874 | COMMAND_POOL_NODE *GetCommandPoolNode(layer_data *dev_data, VkCommandPool pool); |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 875 | shader_module const *GetShaderModuleState(layer_data const *dev_data, VkShaderModule module); |
Mark Lobodzinski | efd933b | 2017-02-10 12:09:23 -0700 | [diff] [blame] | 876 | const PHYS_DEV_PROPERTIES_NODE *GetPhysDevProperties(const layer_data *device_data); |
Mark Lobodzinski | d678dcc | 2017-03-13 09:25:44 -0600 | [diff] [blame] | 877 | const VkPhysicalDeviceFeatures *GetEnabledFeatures(const layer_data *device_data); |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 878 | const DeviceExtensions *GetEnabledExtensions(const layer_data *device_data); |
Mark Lobodzinski | 3c0f636 | 2017-02-01 13:35:48 -0700 | [diff] [blame] | 879 | |
Tobin Ehlis | ab294d8 | 2016-11-21 15:23:51 -0700 | [diff] [blame] | 880 | void invalidateCommandBuffers(const layer_data *, std::unordered_set<GLOBAL_CB_NODE *> const &, VK_OBJECT); |
Tobin Ehlis | e1995fc | 2016-12-22 12:45:09 -0700 | [diff] [blame] | 881 | bool ValidateMemoryIsBoundToBuffer(const layer_data *, const BUFFER_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE); |
| 882 | bool ValidateMemoryIsBoundToImage(const layer_data *, const IMAGE_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE); |
Tobin Ehlis | fad7adf | 2016-10-20 06:50:37 -0600 | [diff] [blame] | 883 | void AddCommandBufferBindingSampler(GLOBAL_CB_NODE *, SAMPLER_STATE *); |
Tobin Ehlis | 30df15c | 2016-10-12 17:17:57 -0600 | [diff] [blame] | 884 | void AddCommandBufferBindingImage(const layer_data *, GLOBAL_CB_NODE *, IMAGE_STATE *); |
Tobin Ehlis | 15b8ea0 | 2016-09-19 14:02:58 -0600 | [diff] [blame] | 885 | void AddCommandBufferBindingImageView(const layer_data *, GLOBAL_CB_NODE *, IMAGE_VIEW_STATE *); |
Tobin Ehlis | 4668dce | 2016-11-16 09:30:23 -0700 | [diff] [blame] | 886 | void AddCommandBufferBindingBuffer(const layer_data *, GLOBAL_CB_NODE *, BUFFER_STATE *); |
Tobin Ehlis | 2515c0e | 2016-09-28 07:12:28 -0600 | [diff] [blame] | 887 | void AddCommandBufferBindingBufferView(const layer_data *, GLOBAL_CB_NODE *, BUFFER_VIEW_STATE *); |
Mike Schuchardt | a502565 | 2017-09-27 14:56:21 -0600 | [diff] [blame] | 888 | bool ValidateObjectNotInUse(const layer_data *dev_data, BASE_NODE *obj_node, VK_OBJECT obj_struct, const char *caller_name, |
| 889 | UNIQUE_VALIDATION_ERROR_CODE error_code); |
Mark Lobodzinski | 9ef5d56 | 2017-01-27 12:28:30 -0700 | [diff] [blame] | 890 | void invalidateCommandBuffers(const layer_data *dev_data, std::unordered_set<GLOBAL_CB_NODE *> const &cb_nodes, VK_OBJECT obj); |
| 891 | void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info); |
Mark Lobodzinski | 306441e | 2017-02-10 13:48:38 -0700 | [diff] [blame] | 892 | void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info); |
Mark Lobodzinski | 3382637 | 2017-04-13 11:10:11 -0600 | [diff] [blame] | 893 | bool ClearMemoryObjectBindings(layer_data *dev_data, uint64_t handle, VulkanObjectType type); |
Tobin Ehlis | 051a65f | 2017-07-11 11:24:22 -0600 | [diff] [blame] | 894 | bool ValidateCmdQueueFlags(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node, const char *caller_name, VkQueueFlags flags, |
Mike Schuchardt | 9c58240 | 2017-02-23 15:57:37 -0700 | [diff] [blame] | 895 | UNIQUE_VALIDATION_ERROR_CODE error_code); |
Tobin Ehlis | 051a65f | 2017-07-11 11:24:22 -0600 | [diff] [blame] | 896 | bool ValidateCmd(layer_data *my_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd, const char *caller_name); |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 897 | bool insideRenderPass(const layer_data *my_data, const GLOBAL_CB_NODE *pCB, const char *apiName, |
| 898 | UNIQUE_VALIDATION_ERROR_CODE msgCode); |
Mark Lobodzinski | d81d101 | 2017-02-01 09:03:06 -0700 | [diff] [blame] | 899 | void SetImageMemoryValid(layer_data *dev_data, IMAGE_STATE *image_state, bool valid); |
Mark Lobodzinski | 2def2bf | 2017-02-02 15:22:50 -0700 | [diff] [blame] | 900 | bool outsideRenderPass(const layer_data *my_data, GLOBAL_CB_NODE *pCB, const char *apiName, UNIQUE_VALIDATION_ERROR_CODE msgCode); |
Tobin Ehlis | c3b6c4c | 2017-02-02 17:26:40 -0700 | [diff] [blame] | 901 | void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const IMAGE_CMD_BUF_LAYOUT_NODE &node); |
| 902 | void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const VkImageLayout &layout); |
Mark Lobodzinski | 8e0c0bf | 2017-02-06 11:06:26 -0700 | [diff] [blame] | 903 | bool ValidateImageMemoryIsValid(layer_data *dev_data, IMAGE_STATE *image_state, const char *functionName); |
| 904 | bool ValidateImageSampleCount(layer_data *dev_data, IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count, |
| 905 | const char *location, UNIQUE_VALIDATION_ERROR_CODE msgCode); |
Mark Lobodzinski | 08f14fa | 2017-02-07 17:20:06 -0700 | [diff] [blame] | 906 | bool rangesIntersect(layer_data const *dev_data, MEMORY_RANGE const *range1, VkDeviceSize offset, VkDeviceSize end); |
Mark Lobodzinski | 680421d | 2017-02-09 13:06:56 -0700 | [diff] [blame] | 907 | bool ValidateBufferMemoryIsValid(layer_data *dev_data, BUFFER_STATE *buffer_state, const char *functionName); |
| 908 | void SetBufferMemoryValid(layer_data *dev_data, BUFFER_STATE *buffer_state, bool valid); |
Mark Lobodzinski | d2b2f61 | 2017-02-15 13:45:18 -0700 | [diff] [blame] | 909 | bool ValidateCmdSubpassState(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd_type); |
John Zulauf | 5c2750c | 2018-01-30 15:04:56 -0700 | [diff] [blame] | 910 | bool ValidateCmd(layer_data *dev_data, const GLOBAL_CB_NODE *cb_state, const CMD_TYPE cmd, const char *caller_name); |
Mark Lobodzinski | d81d101 | 2017-02-01 09:03:06 -0700 | [diff] [blame] | 911 | |
Mark Lobodzinski | 90224de | 2017-01-26 15:23:11 -0700 | [diff] [blame] | 912 | // Prototypes for layer_data accessor functions. These should be in their own header file at some point |
Jeremy Kniager | 7ec550f | 2017-08-16 14:57:42 -0600 | [diff] [blame] | 913 | VkFormatProperties GetFormatProperties(core_validation::layer_data *device_data, VkFormat format); |
Dave Houlton | 130c021 | 2018-01-29 13:39:56 -0700 | [diff] [blame] | 914 | VkResult GetImageFormatProperties(core_validation::layer_data *device_data, const VkImageCreateInfo *image_ci, |
| 915 | VkImageFormatProperties *image_format_properties); |
Tobin Ehlis | f320b19 | 2017-03-14 11:22:50 -0600 | [diff] [blame] | 916 | const debug_report_data *GetReportData(const layer_data *); |
Mark Lobodzinski | 90224de | 2017-01-26 15:23:11 -0700 | [diff] [blame] | 917 | const VkPhysicalDeviceProperties *GetPhysicalDeviceProperties(layer_data *); |
Mark Lobodzinski | 9ef5d56 | 2017-01-27 12:28:30 -0700 | [diff] [blame] | 918 | const CHECK_DISABLED *GetDisables(layer_data *); |
| 919 | std::unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>> *GetImageMap(core_validation::layer_data *); |
| 920 | std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *GetImageSubresourceMap(layer_data *); |
| 921 | std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> *GetImageLayoutMap(layer_data *); |
Tobin Ehlis | c826645 | 2017-04-07 12:20:30 -0600 | [diff] [blame] | 922 | std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const *GetImageLayoutMap(layer_data const *); |
Mark Lobodzinski | 9621074 | 2017-02-09 10:33:46 -0700 | [diff] [blame] | 923 | std::unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>> *GetBufferMap(layer_data *device_data); |
| 924 | std::unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>> *GetBufferViewMap(layer_data *device_data); |
Mark Lobodzinski | 602de98 | 2017-02-09 11:01:33 -0700 | [diff] [blame] | 925 | std::unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>> *GetImageViewMap(layer_data *device_data); |
Chris Forbes | 3fdf41f | 2017-05-02 14:32:26 -0700 | [diff] [blame] | 926 | const DeviceExtensions *GetDeviceExtensions(const layer_data *); |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 927 | } // namespace core_validation |
Tobin Ehlis | 8481f4d | 2016-05-17 08:01:41 -0600 | [diff] [blame] | 928 | |
Mark Lobodzinski | 64318ba | 2017-01-26 13:34:13 -0700 | [diff] [blame] | 929 | #endif // CORE_VALIDATION_TYPES_H_ |