/* Copyright (c) 2015-2016 The Khronos Group Inc.
 * Copyright (c) 2015-2016 Valve Corporation
 * Copyright (c) 2015-2016 LunarG, Inc.
 * Copyright (C) 2015-2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 */
#ifndef CORE_VALIDATION_TYPES_H_
#define CORE_VALIDATION_TYPES_H_

#include "hash_vk_types.h"
#include "vk_safe_struct.h"
#include "vulkan/vulkan.h"
#include "vk_validation_error_messages.h"
#include "vk_layer_logging.h"
#include "vk_object_types.h"
#include "vk_extension_helper.h"
#include <atomic>
#include <functional>
#include <map>
#include <string.h>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include <memory>
#include <list>

// Fwd declarations -- including descriptor_set.h creates an ugly include loop
namespace cvdescriptorset {
class DescriptorSetLayoutDef;
class DescriptorSetLayout;
class DescriptorSet;
}  // namespace cvdescriptorset

struct GLOBAL_CB_NODE;

enum CALL_STATE {
    UNCALLED,       // Function has not been called
    QUERY_COUNT,    // Function called once to query a count
    QUERY_DETAILS,  // Function called w/ a count to query details
};

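// CALL_STATE follows the standard Vulkan "enumerate twice" pattern. A rough application-side sketch of the
// sequence being tracked (illustrative only, error handling omitted):
//
//     uint32_t count = 0;
//     vkGetSwapchainImagesKHR(device, swapchain, &count, nullptr);        // tracker moves to QUERY_COUNT
//     std::vector<VkImage> images(count);
//     vkGetSwapchainImagesKHR(device, swapchain, &count, images.data());  // tracker moves to QUERY_DETAILS
//
// Trackers such as SWAPCHAIN_NODE::vkGetSwapchainImagesKHRState below record how far along this sequence a
// given object has progressed.
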
class BASE_NODE {
   public:
    // Track when object is being used by an in-flight command buffer
    std::atomic_int in_use;
    // Track command buffers that this object is bound to
    //  binding initialized when cmd referencing object is bound to command buffer
    //  binding removed when command buffer is reset or destroyed
    // When an object is destroyed, any bound cbs are set to INVALID
    std::unordered_set<GLOBAL_CB_NODE *> cb_bindings;

    BASE_NODE() { in_use.store(0); };
};

// Track command pools and their command buffers
struct COMMAND_POOL_NODE : public BASE_NODE {
    VkCommandPoolCreateFlags createFlags;
    uint32_t queueFamilyIndex;
    // Cmd buffers allocated from this pool
    std::unordered_set<VkCommandBuffer> commandBuffers;
};

// Generic wrapper for vulkan objects
struct VK_OBJECT {
    uint64_t handle;
    VulkanObjectType type;
};

inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }

namespace std {
template <>
struct hash<VK_OBJECT> {
    size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
};
}  // namespace std

class PHYS_DEV_PROPERTIES_NODE {
   public:
    VkPhysicalDeviceProperties properties;
    std::vector<VkQueueFamilyProperties> queue_family_properties;
};

// Flags describing requirements imposed by the pipeline on a descriptor. These
// can't be checked at pipeline creation time as they depend on the Image or
// ImageView bound.
enum descriptor_req {
    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,

    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_END_RANGE + 1)) - 1,

    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_END_RANGE,
    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,
};

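// Illustrative example (variable names are hypothetical, not part of this header): a shader that samples a
// 2D image causes the pipeline to record DESCRIPTOR_REQ_VIEW_TYPE_2D for that binding. At draw time the
// bound VkImageView must then satisfy that bit, along the lines of
//
//     descriptor_req reqs = DESCRIPTOR_REQ_VIEW_TYPE_2D;                           // derived from shader analysis
//     bool view_type_ok = (reqs & (1 << bound_view_create_info.viewType)) != 0;    // checked against bound view
//
// The SINGLE_SAMPLE/MULTI_SAMPLE bits are checked the same way against the bound image's sample count.
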
struct DESCRIPTOR_POOL_STATE : BASE_NODE {
    VkDescriptorPool pool;
    uint32_t maxSets;        // Max descriptor sets allowed in this pool
    uint32_t availableSets;  // Available descriptor sets in this pool

    safe_VkDescriptorPoolCreateInfo createInfo;
    std::unordered_set<cvdescriptorset::DescriptorSet *> sets;  // Collection of all sets in this pool
    std::vector<uint32_t> maxDescriptorTypeCount;                // Max # of descriptors of each type in this pool
    std::vector<uint32_t> availableDescriptorTypeCount;          // Available # of descriptors of each type in this pool

    DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
        : pool(pool),
          maxSets(pCreateInfo->maxSets),
          availableSets(pCreateInfo->maxSets),
          createInfo(pCreateInfo),
          maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0),
          availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) {
        // Collect maximums per descriptor type.
        for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) {
            uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
            // Same descriptor types can appear several times
            maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount;
            availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
        }
    }
};

// Generic memory binding struct to track objects bound to objects
struct MEM_BINDING {
    VkDeviceMemory mem;
    VkDeviceSize offset;
    VkDeviceSize size;
};

inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }

namespace std {
template <>
struct hash<MEM_BINDING> {
    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
        return intermediate ^ hash<uint64_t>()(mb.size);
    }
};
}  // namespace std

// Superclass for bindable object state (currently images and buffers)
class BINDABLE : public BASE_NODE {
   public:
    bool sparse;  // Is this object being bound with sparse memory or not?
    // Non-sparse binding data
    MEM_BINDING binding;
    // Memory requirements for this BINDABLE
    VkMemoryRequirements requirements;
    // bool to track if memory requirements were checked
    bool memory_requirements_checked;
    // Sparse binding data, initially just tracking MEM_BINDING per mem object
    //  There's more data for sparse bindings so need better long-term solution
    // TODO : Need to update solution to track all sparse binding data
    std::unordered_set<MEM_BINDING> sparse_bindings;

    std::unordered_set<VkDeviceMemory> bound_memory_set_;

    BINDABLE()
        : sparse(false), binding{}, requirements{}, memory_requirements_checked(false), sparse_bindings{}, bound_memory_set_{} {};

    // Update the cached set of memory bindings.
    // Code that changes binding.mem or sparse_bindings must call UpdateBoundMemorySet()
    void UpdateBoundMemorySet() {
        bound_memory_set_.clear();
        if (!sparse) {
            bound_memory_set_.insert(binding.mem);
        } else {
            for (auto sb : sparse_bindings) {
                bound_memory_set_.insert(sb.mem);
            }
        }
    }

    // Return unordered set of memory objects that are bound
    // Instead of creating a set from scratch each query, return the cached one
    const std::unordered_set<VkDeviceMemory> &GetBoundMemory() const { return bound_memory_set_; }
};

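// Typical (illustrative) use of the cached binding set -- a sketch, not a required calling convention:
//
//     buffer_state->binding.mem = mem;       // or insert into sparse_bindings for a sparse resource
//     buffer_state->UpdateBoundMemorySet();  // refresh the cache after any binding change
//
// Validation code can then iterate GetBoundMemory() to inspect each bound VkDeviceMemory without
// rebuilding the set on every query.
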
class BUFFER_STATE : public BINDABLE {
   public:
    VkBuffer buffer;
    VkBufferCreateInfo createInfo;
    BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
            for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
                pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
            }
            createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
        }

        if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
            sparse = true;
        }
    };

    BUFFER_STATE(BUFFER_STATE const &rh_obj) = delete;

    ~BUFFER_STATE() {
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            delete[] createInfo.pQueueFamilyIndices;
            createInfo.pQueueFamilyIndices = nullptr;
        }
    };
};

class BUFFER_VIEW_STATE : public BASE_NODE {
   public:
    VkBufferView buffer_view;
    VkBufferViewCreateInfo create_info;
    BUFFER_VIEW_STATE(VkBufferView bv, const VkBufferViewCreateInfo *ci) : buffer_view(bv), create_info(*ci){};
    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
};

struct SAMPLER_STATE : public BASE_NODE {
    VkSampler sampler;
    VkSamplerCreateInfo createInfo;

    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci){};
};

class IMAGE_STATE : public BINDABLE {
   public:
    VkImage image;
    VkImageCreateInfo createInfo;
    bool valid;                     // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO
    bool acquired;                  // If this is a swapchain image, has it been acquired by the app.
    bool shared_presentable;        // True for a front-buffered swapchain image
    bool layout_locked;             // A front-buffered image that has been presented can never have layout transitioned
    bool get_sparse_reqs_called;    // Track if GetImageSparseMemoryRequirements() has been called for this image
    bool sparse_metadata_required;  // Track if sparse metadata aspect is required for this image
    bool sparse_metadata_bound;     // Track if sparse metadata aspect is bound to this image
    std::vector<VkSparseImageMemoryRequirements> sparse_requirements;
    IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
        : image(img),
          createInfo(*pCreateInfo),
          valid(false),
          acquired(false),
          shared_presentable(false),
          layout_locked(false),
          get_sparse_reqs_called(false),
          sparse_metadata_required(false),
          sparse_metadata_bound(false),
          sparse_requirements{} {
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
            for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
                pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
            }
            createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
        }

        if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
            sparse = true;
        }
    };

    IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;

    ~IMAGE_STATE() {
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            delete[] createInfo.pQueueFamilyIndices;
            createInfo.pQueueFamilyIndices = nullptr;
        }
    };
};

class IMAGE_VIEW_STATE : public BASE_NODE {
   public:
    VkImageView image_view;
    VkImageViewCreateInfo create_info;
    IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci) : image_view(iv), create_info(*ci){};
    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};

struct MemRange {
    VkDeviceSize offset;
    VkDeviceSize size;
};

struct MEMORY_RANGE {
    uint64_t handle;
    bool image;   // True for image, false for buffer
    bool linear;  // True for buffers and linear images
    bool valid;   // True if this range is known to be valid
    VkDeviceMemory memory;
    VkDeviceSize start;
    VkDeviceSize size;
    VkDeviceSize end;  // Store this pre-computed for simplicity
    // Set of ptrs to every range aliased with this one
    std::unordered_set<MEMORY_RANGE *> aliases;
};

// Data struct for tracking memory object
struct DEVICE_MEM_INFO : public BASE_NODE {
    void *object;       // Dispatchable object used to create this memory (device or swapchain)
    bool global_valid;  // If allocation is mapped or external, set to "true" to be picked up by subsequently bound ranges
    VkDeviceMemory mem;
    VkMemoryAllocateInfo alloc_info;
    bool is_dedicated;
    VkBuffer dedicated_buffer;
    VkImage dedicated_image;
    std::unordered_set<VK_OBJECT> obj_bindings;               // objects bound to this memory
    std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges;  // Map of object to its binding range
    // Convenience sets of image/buffer handles to speed up iterating over images or buffers independently
    std::unordered_set<uint64_t> bound_images;
    std::unordered_set<uint64_t> bound_buffers;

    MemRange mem_range;
    void *shadow_copy_base;    // Base of layer's allocation for guard band, data, and alignment space
    void *shadow_copy;         // Pointer to start of guard-band data before mapped region
    uint64_t shadow_pad_size;  // Size of the guard-band data before and after actual data. It MUST be a
                               // multiple of limits.minMemoryMapAlignment
    void *p_driver_data;       // Pointer to application's actual memory

    DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
        : object(disp_object),
          global_valid(false),
          mem(in_mem),
          alloc_info(*p_alloc_info),
          is_dedicated(false),
          dedicated_buffer(VK_NULL_HANDLE),
          dedicated_image(VK_NULL_HANDLE),
          mem_range{},
          shadow_copy_base(0),
          shadow_copy(0),
          shadow_pad_size(0),
          p_driver_data(0){};
};

class SWAPCHAIN_NODE {
   public:
    safe_VkSwapchainCreateInfoKHR createInfo;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> images;
    bool replaced = false;
    bool shared_presentable = false;
    CALL_STATE vkGetSwapchainImagesKHRState = UNCALLED;
    uint32_t get_swapchain_image_count = 0;
    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
        : createInfo(pCreateInfo), swapchain(swapchain) {}
};

class IMAGE_CMD_BUF_LAYOUT_NODE {
   public:
    IMAGE_CMD_BUF_LAYOUT_NODE() = default;
    IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput)
        : initialLayout(initialLayoutInput), layout(layoutInput) {}

    VkImageLayout initialLayout;
    VkImageLayout layout;
};

// Store the DAG.
struct DAGNode {
    uint32_t pass;
    std::vector<uint32_t> prev;
    std::vector<uint32_t> next;
};

struct RENDER_PASS_STATE : public BASE_NODE {
    VkRenderPass renderPass;
    safe_VkRenderPassCreateInfo createInfo;
    std::vector<bool> hasSelfDependency;
    std::vector<DAGNode> subpassToNode;
    std::vector<int32_t> subpass_to_dependency_index;  // srcSubpass to dependency index of self dep, or -1 if none
    std::unordered_map<uint32_t, bool> attachment_first_read;

    RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) : createInfo(pCreateInfo) {}
};

// vkCmd tracking -- complete as of header 1.0.68
// please keep in "none, then sorted" order
// Note: grepping vulkan.h for VKAPI_CALL.*vkCmd will return all functions except vkEndCommandBuffer

enum CMD_TYPE {
    CMD_NONE,
    CMD_BEGINQUERY,
    CMD_BEGINRENDERPASS,
    CMD_BINDDESCRIPTORSETS,
    CMD_BINDINDEXBUFFER,
    CMD_BINDPIPELINE,
    CMD_BINDVERTEXBUFFERS,
    CMD_BLITIMAGE,
    CMD_CLEARATTACHMENTS,
    CMD_CLEARCOLORIMAGE,
    CMD_CLEARDEPTHSTENCILIMAGE,
    CMD_COPYBUFFER,
    CMD_COPYBUFFERTOIMAGE,
    CMD_COPYIMAGE,
    CMD_COPYIMAGETOBUFFER,
    CMD_COPYQUERYPOOLRESULTS,
    CMD_DEBUGMARKERBEGINEXT,
    CMD_DEBUGMARKERENDEXT,
    CMD_DEBUGMARKERINSERTEXT,
    CMD_DISPATCH,
    CMD_DISPATCHBASEKHX,
    CMD_DISPATCHINDIRECT,
    CMD_DRAW,
    CMD_DRAWINDEXED,
    CMD_DRAWINDEXEDINDIRECT,
    CMD_DRAWINDEXEDINDIRECTCOUNTAMD,
    CMD_DRAWINDIRECT,
    CMD_DRAWINDIRECTCOUNTAMD,
    CMD_ENDCOMMANDBUFFER,  // Should be the last command in any RECORDED cmd buffer
    CMD_ENDQUERY,
    CMD_ENDRENDERPASS,
    CMD_EXECUTECOMMANDS,
    CMD_FILLBUFFER,
    CMD_NEXTSUBPASS,
    CMD_PIPELINEBARRIER,
    CMD_PROCESSCOMMANDSNVX,
    CMD_PUSHCONSTANTS,
    CMD_PUSHDESCRIPTORSETKHR,
    CMD_PUSHDESCRIPTORSETWITHTEMPLATEKHR,
    CMD_RESERVESPACEFORCOMMANDSNVX,
    CMD_RESETEVENT,
    CMD_RESETQUERYPOOL,
    CMD_RESOLVEIMAGE,
    CMD_SETBLENDCONSTANTS,
    CMD_SETDEPTHBIAS,
    CMD_SETDEPTHBOUNDS,
    CMD_SETDEVICEMASKKHX,
    CMD_SETDISCARDRECTANGLEEXT,
    CMD_SETEVENT,
    CMD_SETLINEWIDTH,
    CMD_SETSAMPLELOCATIONSEXT,
    CMD_SETSCISSOR,
    CMD_SETSTENCILCOMPAREMASK,
    CMD_SETSTENCILREFERENCE,
    CMD_SETSTENCILWRITEMASK,
    CMD_SETVIEWPORT,
    CMD_SETVIEWPORTWSCALINGNV,
    CMD_UPDATEBUFFER,
    CMD_WAITEVENTS,
    CMD_WRITETIMESTAMP,
};

enum CB_STATE {
    CB_NEW,                 // Newly created CB w/o any cmds
    CB_RECORDING,           // BeginCB has been called on this CB
    CB_RECORDED,            // EndCB has been called on this CB
    CB_INVALID_COMPLETE,    // had a complete recording, but was since invalidated
    CB_INVALID_INCOMPLETE,  // fouled before recording was completed
};

// CB Status -- used to track status of various bindings on cmd buffer objects
typedef VkFlags CBStatusFlags;
enum CBStatusFlagBits {
    // clang-format off
    CBSTATUS_NONE                   = 0x00000000,   // No status is set
    CBSTATUS_LINE_WIDTH_SET         = 0x00000001,   // Line width has been set
    CBSTATUS_DEPTH_BIAS_SET         = 0x00000002,   // Depth bias has been set
    CBSTATUS_BLEND_CONSTANTS_SET    = 0x00000004,   // Blend constants state has been set
    CBSTATUS_DEPTH_BOUNDS_SET       = 0x00000008,   // Depth bounds state object has been set
    CBSTATUS_STENCIL_READ_MASK_SET  = 0x00000010,   // Stencil read mask has been set
    CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000020,   // Stencil write mask has been set
    CBSTATUS_STENCIL_REFERENCE_SET  = 0x00000040,   // Stencil reference has been set
    CBSTATUS_VIEWPORT_SET           = 0x00000080,
    CBSTATUS_SCISSOR_SET            = 0x00000100,
    CBSTATUS_INDEX_BUFFER_BOUND     = 0x00000200,   // Index buffer has been set
    CBSTATUS_ALL_STATE_SET          = 0x000001FF,   // All state set (intentionally exclude index buffer)
    // clang-format on
};

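// Illustrative flow (a sketch, variable names hypothetical): recording vkCmdSetLineWidth() sets
// CBSTATUS_LINE_WIDTH_SET in GLOBAL_CB_NODE::status, while binding a pipeline that does NOT declare
// VK_DYNAMIC_STATE_LINE_WIDTH contributes the same bit to static_status (state baked into the pipeline).
// A draw-time check along the lines of
//
//     CBStatusFlags required = CBSTATUS_ALL_STATE_SET & ~cb_node->static_status;
//     bool missing_dynamic_state = (cb_node->status & required) != required;
//
// can then flag dynamic state the pipeline expects but the command buffer never set.
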
struct TEMPLATE_STATE {
    VkDescriptorUpdateTemplateKHR desc_update_template;
    safe_VkDescriptorUpdateTemplateCreateInfo create_info;

    TEMPLATE_STATE(VkDescriptorUpdateTemplateKHR update_template, safe_VkDescriptorUpdateTemplateCreateInfo *pCreateInfo)
        : desc_update_template(update_template), create_info(*pCreateInfo) {}
};

struct QueryObject {
    VkQueryPool pool;
    uint32_t index;
};

inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
    return (query1.pool == query2.pool && query1.index == query2.index);
}

namespace std {
template <>
struct hash<QueryObject> {
    size_t operator()(QueryObject query) const throw() {
        return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
    }
};
}  // namespace std

struct DRAW_DATA {
    std::vector<VkBuffer> buffers;
};

struct ImageSubresourcePair {
    VkImage image;
    bool hasSubresource;
    VkImageSubresource subresource;
};

inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) {
    if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource) return false;
    return !img1.hasSubresource ||
           (img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel &&
            img1.subresource.arrayLayer == img2.subresource.arrayLayer);
}

namespace std {
template <>
struct hash<ImageSubresourcePair> {
    size_t operator()(ImageSubresourcePair img) const throw() {
        size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image));
        hashVal ^= hash<bool>()(img.hasSubresource);
        if (img.hasSubresource) {
            hashVal ^= hash<uint32_t>()(reinterpret_cast<uint32_t &>(img.subresource.aspectMask));
            hashVal ^= hash<uint32_t>()(img.subresource.mipLevel);
            hashVal ^= hash<uint32_t>()(img.subresource.arrayLayer);
        }
        return hashVal;
    }
};
}  // namespace std

// Canonical dictionary for PushConstantRanges
using PushConstantRangesDict = hash_util::Dictionary<PushConstantRanges>;
using PushConstantRangesId = PushConstantRangesDict::Id;

// Canonical dictionary for the pipeline layout's list of descriptor set layouts
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = std::shared_ptr<const DescriptorSetLayoutDef>;
using PipelineLayoutSetLayoutsDef = std::vector<DescriptorSetLayoutId>;
using PipelineLayoutSetLayoutsDict =
    hash_util::Dictionary<PipelineLayoutSetLayoutsDef, hash_util::IsOrderedContainer<PipelineLayoutSetLayoutsDef>>;
using PipelineLayoutSetLayoutsId = PipelineLayoutSetLayoutsDict::Id;

// Defines/stores a compatibility definition for set N
// The set layouts list (the pipeline layout's "layout of layouts") must store at least set+1 entries, but only the first set+1
// are considered for hash and equality testing
// Note: the "canonical" data are referenced by Id, not including handle or device specific state
// Note: hash and equality only consider layout_id entries [0, set] for determining uniqueness
struct PipelineLayoutCompatDef {
    uint32_t set;
    PushConstantRangesId push_constant_ranges;
    PipelineLayoutSetLayoutsId set_layouts_id;
    PipelineLayoutCompatDef(const uint32_t set_index, const PushConstantRangesId pcr_id, const PipelineLayoutSetLayoutsId sl_id)
        : set(set_index), push_constant_ranges(pcr_id), set_layouts_id(sl_id) {}
    size_t hash() const;
    bool operator==(const PipelineLayoutCompatDef &other) const;
};

// Canonical dictionary for PipelineLayoutCompat records
using PipelineLayoutCompatDict = hash_util::Dictionary<PipelineLayoutCompatDef, hash_util::HasHashMember<PipelineLayoutCompatDef>>;
using PipelineLayoutCompatId = PipelineLayoutCompatDict::Id;

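// Sketch of the intended use (illustrative, not the implementation): per the Vulkan spec, two VkPipelineLayouts
// are "compatible for set N" when they were created with identical push constant ranges and identically defined
// descriptor set layouts for sets 0..N. Because both pieces are canonicalized through the dictionaries above,
// that test reduces to comparing interned ids, e.g.
//
//     bool compatible_for_set_n = (layout_a.compat_for_set[n] == layout_b.compat_for_set[n]);
//
// where compat_for_set is the per-set vector stored in PIPELINE_LAYOUT_NODE below.
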
// Store layouts and pushconstants for PipelineLayout
struct PIPELINE_LAYOUT_NODE {
    VkPipelineLayout layout;
    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts;
    PushConstantRangesId push_constant_ranges;
    std::vector<PipelineLayoutCompatId> compat_for_set;

    PIPELINE_LAYOUT_NODE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {}

    void reset() {
        layout = VK_NULL_HANDLE;
        set_layouts.clear();
        push_constant_ranges.reset();
        compat_for_set.clear();
    }
};

class PIPELINE_STATE : public BASE_NODE {
   public:
    VkPipeline pipeline;
    safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
    // Hold shared ptr to RP in case RP itself is destroyed
    std::shared_ptr<RENDER_PASS_STATE> rp_state;
    safe_VkComputePipelineCreateInfo computePipelineCI;
    // Flag of which shader stages are active for this pipeline
    uint32_t active_shaders;
    uint32_t duplicate_shaders;
    // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
    std::unordered_map<uint32_t, std::map<uint32_t, descriptor_req>> active_slots;
    // Vtx input info (if any)
    std::vector<VkVertexInputBindingDescription> vertexBindingDescriptions;
    std::vector<VkPipelineColorBlendAttachmentState> attachments;
    bool blendConstantsEnabled;  // Blend constants enabled for any attachments
    PIPELINE_LAYOUT_NODE pipeline_layout;
    VkPrimitiveTopology topology_at_rasterizer;

    // Default constructor
    PIPELINE_STATE()
        : pipeline{},
          graphicsPipelineCI{},
          rp_state(nullptr),
          computePipelineCI{},
          active_shaders(0),
          duplicate_shaders(0),
          active_slots(),
          vertexBindingDescriptions(),
          attachments(),
          blendConstantsEnabled(false),
          pipeline_layout(),
          topology_at_rasterizer{} {}

    void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate) {
        bool uses_color_attachment = false;
        bool uses_depthstencil_attachment = false;
        if (pCreateInfo->subpass < rpstate->createInfo.subpassCount) {
            const auto &subpass = rpstate->createInfo.pSubpasses[pCreateInfo->subpass];

            for (uint32_t i = 0; i < subpass.colorAttachmentCount; ++i) {
                if (subpass.pColorAttachments[i].attachment != VK_ATTACHMENT_UNUSED) {
                    uses_color_attachment = true;
                    break;
                }
            }

            if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
                uses_depthstencil_attachment = true;
            }
        }
        graphicsPipelineCI.initialize(pCreateInfo, uses_color_attachment, uses_depthstencil_attachment);
        // Make sure compute pipeline is null
        VkComputePipelineCreateInfo emptyComputeCI = {};
        computePipelineCI.initialize(&emptyComputeCI);
        for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
            const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
            this->duplicate_shaders |= this->active_shaders & pPSSCI->stage;
            this->active_shaders |= pPSSCI->stage;
        }
        if (graphicsPipelineCI.pVertexInputState) {
            const auto pVICI = graphicsPipelineCI.pVertexInputState;
            if (pVICI->vertexBindingDescriptionCount) {
                this->vertexBindingDescriptions = std::vector<VkVertexInputBindingDescription>(
                    pVICI->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions + pVICI->vertexBindingDescriptionCount);
            }
        }
        if (graphicsPipelineCI.pColorBlendState) {
            const auto pCBCI = graphicsPipelineCI.pColorBlendState;
            if (pCBCI->attachmentCount) {
                this->attachments = std::vector<VkPipelineColorBlendAttachmentState>(pCBCI->pAttachments,
                                                                                     pCBCI->pAttachments + pCBCI->attachmentCount);
            }
        }
        if (graphicsPipelineCI.pInputAssemblyState) {
            topology_at_rasterizer = graphicsPipelineCI.pInputAssemblyState->topology;
        }
        rp_state = rpstate;
    }

    void initComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo) {
        computePipelineCI.initialize(pCreateInfo);
        // Make sure gfx pipeline is null
        VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
        graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
        switch (computePipelineCI.stage.stage) {
            case VK_SHADER_STAGE_COMPUTE_BIT:
                this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
                break;
            default:
                // TODO : Flag error
                break;
        }
    }
};

// Track last states that are bound per pipeline bind point (Gfx & Compute)
struct LAST_BOUND_STATE {
    PIPELINE_STATE *pipeline_state;
    VkPipelineLayout pipeline_layout;
    // Track each set that has been bound
    // Ordered bound set tracking where index is set# that given set is bound to
    std::vector<cvdescriptorset::DescriptorSet *> boundDescriptorSets;
    std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;
    // one dynamic offset per dynamic descriptor bound to this CB
    std::vector<std::vector<uint32_t>> dynamicOffsets;
    std::vector<PipelineLayoutCompatId> compat_id_for_set;

    void reset() {
        pipeline_state = nullptr;
        pipeline_layout = VK_NULL_HANDLE;
        boundDescriptorSets.clear();
        push_descriptor_set = nullptr;
        dynamicOffsets.clear();
    }
};
// Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class
struct GLOBAL_CB_NODE : public BASE_NODE {
    VkCommandBuffer commandBuffer;
    VkCommandBufferAllocateInfo createInfo = {};
    VkCommandBufferBeginInfo beginInfo;
    VkCommandBufferInheritanceInfo inheritanceInfo;
    VkDevice device;  // device this CB belongs to
    bool hasDrawCmd;
    CB_STATE state;        // Track cmd buffer update state
    uint64_t submitCount;  // Number of times CB has been submitted
    typedef uint64_t ImageLayoutUpdateCount;
    ImageLayoutUpdateCount image_layout_change_count;  // The sequence number for changes to image layout (for cached validation)
    CBStatusFlags status;         // Track status of various bindings on cmd buffer
    CBStatusFlags static_status;  // All state bits provided by current graphics pipeline
                                  // rather than dynamic state
    // Currently storing "lastBound" objects on per-CB basis
    //  long-term may want to create caches of "lastBound" states and could have
    //  each individual CMD_NODE referencing its own "lastBound" state
    // Store last bound state for Gfx & Compute pipeline bind points
    LAST_BOUND_STATE lastBound[VK_PIPELINE_BIND_POINT_RANGE_SIZE];

    uint32_t viewportMask;
    uint32_t scissorMask;
    VkRenderPassBeginInfo activeRenderPassBeginInfo;
    RENDER_PASS_STATE *activeRenderPass;
    VkSubpassContents activeSubpassContents;
    uint32_t activeSubpass;
    VkFramebuffer activeFramebuffer;
    std::unordered_set<VkFramebuffer> framebuffers;
    // Unified data structs to track objects bound to this command buffer as well as object
    //  dependencies that have been broken : either destroyed objects, or updated descriptor sets
    std::unordered_set<VK_OBJECT> object_bindings;
    std::vector<VK_OBJECT> broken_bindings;

    std::unordered_set<VkEvent> waitedEvents;
    std::vector<VkEvent> writeEventsBeforeWait;
    std::vector<VkEvent> events;
    std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset;
    std::unordered_map<QueryObject, bool> queryToStateMap;  // 0 is unavailable, 1 is available
    std::unordered_set<QueryObject> activeQueries;
    std::unordered_set<QueryObject> startedQueries;
    std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap;
    std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
    std::vector<DRAW_DATA> drawData;
    DRAW_DATA currentDrawData;
    bool vertex_buffer_used;  // Track for perf warning to make sure any bound vtx buffer used
    VkCommandBuffer primaryCommandBuffer;
    // Track images and buffers that are updated by this CB at the point of a draw
    std::unordered_set<VkImageView> updateImages;
    std::unordered_set<VkBuffer> updateBuffers;
    // If primary, the secondary command buffers we will call.
    // If secondary, the primary command buffers we will be called by.
    std::unordered_set<GLOBAL_CB_NODE *> linkedCommandBuffers;
    // Validation functions run at primary CB queue submit time
    std::vector<std::function<bool()>> queue_submit_functions;
    // Validation functions run when secondary CB is executed in primary
    std::vector<std::function<bool(GLOBAL_CB_NODE *, VkFramebuffer)>> cmd_execute_commands_functions;
    std::unordered_set<VkDeviceMemory> memObjs;
    std::vector<std::function<bool(VkQueue)>> eventUpdates;
    std::vector<std::function<bool(VkQueue)>> queryUpdates;
    std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets;
};

struct SEMAPHORE_WAIT {
    VkSemaphore semaphore;
    VkQueue queue;
    uint64_t seq;
};

struct CB_SUBMISSION {
    CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores,
                  std::vector<VkSemaphore> const &signalSemaphores, std::vector<VkSemaphore> const &externalSemaphores,
                  VkFence fence)
        : cbs(cbs),
          waitSemaphores(waitSemaphores),
          signalSemaphores(signalSemaphores),
          externalSemaphores(externalSemaphores),
          fence(fence) {}

    std::vector<VkCommandBuffer> cbs;
    std::vector<SEMAPHORE_WAIT> waitSemaphores;
    std::vector<VkSemaphore> signalSemaphores;
    std::vector<VkSemaphore> externalSemaphores;
    VkFence fence;
};

struct IMAGE_LAYOUT_NODE {
    VkImageLayout layout;
    VkFormat format;
};

// CHECK_DISABLED struct is a container for bools that can block validation checks from being performed.
// The end goal is to have all checks guarded by a bool. The bools are all "false" by default meaning that all checks
// are enabled. At CreateInstance time, the user can use the VK_EXT_validation_flags extension to pass in enum values
// of VkValidationCheckEXT that will selectively disable checks.
struct CHECK_DISABLED {
    bool command_buffer_state;
    bool create_descriptor_set_layout;
    bool destroy_buffer_view;       // Skip validation at DestroyBufferView time
    bool destroy_image_view;        // Skip validation at DestroyImageView time
    bool destroy_pipeline;          // Skip validation at DestroyPipeline time
    bool destroy_descriptor_pool;   // Skip validation at DestroyDescriptorPool time
    bool destroy_framebuffer;       // Skip validation at DestroyFramebuffer time
    bool destroy_renderpass;        // Skip validation at DestroyRenderpass time
    bool destroy_image;             // Skip validation at DestroyImage time
    bool destroy_sampler;           // Skip validation at DestroySampler time
    bool destroy_command_pool;      // Skip validation at DestroyCommandPool time
    bool destroy_event;             // Skip validation at DestroyEvent time
    bool free_memory;               // Skip validation at FreeMemory time
    bool object_in_use;             // Skip all object in_use checking
    bool idle_descriptor_set;       // Skip check to verify that descriptor set is not in use
    bool push_constant_range;       // Skip push constant range checks
    bool free_descriptor_sets;      // Skip validation prior to vkFreeDescriptorSets()
    bool allocate_descriptor_sets;  // Skip validation prior to vkAllocateDescriptorSets()
    bool update_descriptor_sets;    // Skip validation prior to vkUpdateDescriptorSets()
    bool wait_for_fences;
    bool get_fence_state;
    bool queue_wait_idle;
    bool device_wait_idle;
    bool destroy_fence;
    bool destroy_semaphore;
    bool destroy_query_pool;
    bool get_query_pool_results;
    bool destroy_buffer;
    bool shader_validation;  // Skip validation for shaders

    void SetAll(bool value) { std::fill(&command_buffer_state, &shader_validation + 1, value); }
};

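// Illustrative only -- how an application could request these disables via VK_EXT_validation_flags at
// instance creation (the extension types below are from vulkan.h, not defined in this header):
//
//     VkValidationCheckEXT disables[] = {VK_VALIDATION_CHECK_SHADERS_EXT};
//     VkValidationFlagsEXT validation_flags = {VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT, nullptr, 1, disables};
//     VkInstanceCreateInfo instance_ci = {};
//     instance_ci.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
//     instance_ci.pNext = &validation_flags;  // the layer would then set e.g. CHECK_DISABLED::shader_validation
//     // ... fill in remaining fields and call vkCreateInstance(&instance_ci, nullptr, &instance);
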
struct MT_FB_ATTACHMENT_INFO {
    IMAGE_VIEW_STATE *view_state;
    VkImage image;
};

class FRAMEBUFFER_STATE : public BASE_NODE {
   public:
    VkFramebuffer framebuffer;
    safe_VkFramebufferCreateInfo createInfo;
    std::shared_ptr<RENDER_PASS_STATE> rp_state;
    std::vector<MT_FB_ATTACHMENT_INFO> attachments;
    FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate)
        : framebuffer(fb), createInfo(pCreateInfo), rp_state(rpstate){};
};

struct shader_module;
struct DeviceExtensions;

// Fwd declarations of layer_data and helpers to look-up/validate state from layer_data maps
namespace core_validation {
struct layer_data;
cvdescriptorset::DescriptorSet *GetSetNode(const layer_data *, VkDescriptorSet);
std::shared_ptr<cvdescriptorset::DescriptorSetLayout const> const GetDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout);
DESCRIPTOR_POOL_STATE *GetDescriptorPoolState(const layer_data *, const VkDescriptorPool);
BUFFER_STATE *GetBufferState(const layer_data *, VkBuffer);
IMAGE_STATE *GetImageState(const layer_data *, VkImage);
DEVICE_MEM_INFO *GetMemObjInfo(const layer_data *, VkDeviceMemory);
BUFFER_VIEW_STATE *GetBufferViewState(const layer_data *, VkBufferView);
SAMPLER_STATE *GetSamplerState(const layer_data *, VkSampler);
IMAGE_VIEW_STATE *GetImageViewState(const layer_data *, VkImageView);
SWAPCHAIN_NODE *GetSwapchainNode(const layer_data *, VkSwapchainKHR);
GLOBAL_CB_NODE *GetCBNode(layer_data const *my_data, const VkCommandBuffer cb);
RENDER_PASS_STATE *GetRenderPassState(layer_data const *dev_data, VkRenderPass renderpass);
std::shared_ptr<RENDER_PASS_STATE> GetRenderPassStateSharedPtr(layer_data const *dev_data, VkRenderPass renderpass);
FRAMEBUFFER_STATE *GetFramebufferState(const layer_data *my_data, VkFramebuffer framebuffer);
COMMAND_POOL_NODE *GetCommandPoolNode(layer_data *dev_data, VkCommandPool pool);
shader_module const *GetShaderModuleState(layer_data const *dev_data, VkShaderModule module);
const PHYS_DEV_PROPERTIES_NODE *GetPhysDevProperties(const layer_data *device_data);
const VkPhysicalDeviceFeatures *GetEnabledFeatures(const layer_data *device_data);
const DeviceExtensions *GetEnabledExtensions(const layer_data *device_data);

void invalidateCommandBuffers(const layer_data *, std::unordered_set<GLOBAL_CB_NODE *> const &, VK_OBJECT);
bool ValidateMemoryIsBoundToBuffer(const layer_data *, const BUFFER_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE);
bool ValidateMemoryIsBoundToImage(const layer_data *, const IMAGE_STATE *, const char *, UNIQUE_VALIDATION_ERROR_CODE);
void AddCommandBufferBindingSampler(GLOBAL_CB_NODE *, SAMPLER_STATE *);
void AddCommandBufferBindingImage(const layer_data *, GLOBAL_CB_NODE *, IMAGE_STATE *);
void AddCommandBufferBindingImageView(const layer_data *, GLOBAL_CB_NODE *, IMAGE_VIEW_STATE *);
void AddCommandBufferBindingBuffer(const layer_data *, GLOBAL_CB_NODE *, BUFFER_STATE *);
void AddCommandBufferBindingBufferView(const layer_data *, GLOBAL_CB_NODE *, BUFFER_VIEW_STATE *);
bool ValidateObjectNotInUse(const layer_data *dev_data, BASE_NODE *obj_node, VK_OBJECT obj_struct, const char *caller_name,
                            UNIQUE_VALIDATION_ERROR_CODE error_code);
void invalidateCommandBuffers(const layer_data *dev_data, std::unordered_set<GLOBAL_CB_NODE *> const &cb_nodes, VK_OBJECT obj);
void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info);
void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEM_INFO *mem_info);
bool ClearMemoryObjectBindings(layer_data *dev_data, uint64_t handle, VulkanObjectType type);
bool ValidateCmdQueueFlags(layer_data *dev_data, const GLOBAL_CB_NODE *cb_node, const char *caller_name, VkQueueFlags flags,
                           UNIQUE_VALIDATION_ERROR_CODE error_code);
bool ValidateCmd(layer_data *my_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd, const char *caller_name);
bool insideRenderPass(const layer_data *my_data, const GLOBAL_CB_NODE *pCB, const char *apiName,
                      UNIQUE_VALIDATION_ERROR_CODE msgCode);
void SetImageMemoryValid(layer_data *dev_data, IMAGE_STATE *image_state, bool valid);
bool outsideRenderPass(const layer_data *my_data, GLOBAL_CB_NODE *pCB, const char *apiName, UNIQUE_VALIDATION_ERROR_CODE msgCode);
void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const IMAGE_CMD_BUF_LAYOUT_NODE &node);
void SetLayout(GLOBAL_CB_NODE *pCB, ImageSubresourcePair imgpair, const VkImageLayout &layout);
bool ValidateImageMemoryIsValid(layer_data *dev_data, IMAGE_STATE *image_state, const char *functionName);
bool ValidateImageSampleCount(layer_data *dev_data, IMAGE_STATE *image_state, VkSampleCountFlagBits sample_count,
                              const char *location, UNIQUE_VALIDATION_ERROR_CODE msgCode);
bool rangesIntersect(layer_data const *dev_data, MEMORY_RANGE const *range1, VkDeviceSize offset, VkDeviceSize end);
bool ValidateBufferMemoryIsValid(layer_data *dev_data, BUFFER_STATE *buffer_state, const char *functionName);
void SetBufferMemoryValid(layer_data *dev_data, BUFFER_STATE *buffer_state, bool valid);
bool ValidateCmdSubpassState(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd_type);
bool ValidateCmd(layer_data *dev_data, const GLOBAL_CB_NODE *cb_state, const CMD_TYPE cmd, const char *caller_name);

// Prototypes for layer_data accessor functions. These should be in their own header file at some point
VkFormatProperties GetFormatProperties(core_validation::layer_data *device_data, VkFormat format);
VkResult GetImageFormatProperties(core_validation::layer_data *device_data, const VkImageCreateInfo *image_ci,
                                  VkImageFormatProperties *image_format_properties);
const debug_report_data *GetReportData(const layer_data *);
const VkPhysicalDeviceProperties *GetPhysicalDeviceProperties(layer_data *);
const CHECK_DISABLED *GetDisables(layer_data *);
std::unordered_map<VkImage, std::unique_ptr<IMAGE_STATE>> *GetImageMap(core_validation::layer_data *);
std::unordered_map<VkImage, std::vector<ImageSubresourcePair>> *GetImageSubresourceMap(layer_data *);
std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> *GetImageLayoutMap(layer_data *);
std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> const *GetImageLayoutMap(layer_data const *);
std::unordered_map<VkBuffer, std::unique_ptr<BUFFER_STATE>> *GetBufferMap(layer_data *device_data);
std::unordered_map<VkBufferView, std::unique_ptr<BUFFER_VIEW_STATE>> *GetBufferViewMap(layer_data *device_data);
std::unordered_map<VkImageView, std::unique_ptr<IMAGE_VIEW_STATE>> *GetImageViewMap(layer_data *device_data);
const DeviceExtensions *GetDeviceExtensions(const layer_data *);
}  // namespace core_validation

#endif  // CORE_VALIDATION_TYPES_H_