/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 */
22
// Suppress unused warning on Linux: these VUID strings are defined in every
// translation unit that includes this header, but not all of them use every string.
#if defined(__GNUC__)
#define DECORATE_UNUSED __attribute__((unused))
#else
#define DECORATE_UNUSED
#endif

// Validation Unique IDs for object-tracker messages that have no assigned
// spec VUID. Kept one-per-line with alignment, hence the clang-format guard.
// clang-format off
static const char DECORATE_UNUSED *kVUID_ObjectTracker_Info = "UNASSIGNED-ObjectTracker-Info";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_InternalError = "UNASSIGNED-ObjectTracker-InternalError";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_ObjectLeak = "UNASSIGNED-ObjectTracker-ObjectLeak";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_UnknownObject = "UNASSIGNED-ObjectTracker-UnknownObject";
// clang-format on

// Macro is only needed for the declarations above; keep it out of including files.
#undef DECORATE_UNUSED

// Monotonically increasing index stamped into "CREATE" log messages; defined
// elsewhere in the layer.
extern uint64_t object_track_index;
40
// Object Status -- used to track state of individual objects
typedef VkFlags ObjectStatusFlags;
enum ObjectStatusFlagBits {
    OBJSTATUS_NONE = 0x00000000,                      // No status is set
    OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001,        // Fence has been submitted
    OBJSTATUS_VIEWPORT_BOUND = 0x00000002,            // Viewport state object has been bound
    OBJSTATUS_RASTER_BOUND = 0x00000004,              // Raster state object has been bound
    OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008,         // Color blend state object has been bound
    OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010,       // Depth-stencil state object has been bound
    OBJSTATUS_GPU_MEM_MAPPED = 0x00000020,            // Memory object is currently mapped
    OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040,  // Command Buffer is of type SECONDARY
    OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000080,          // Allocated with custom allocator
};
54
// Object and state information structure: one instance per tracked Vulkan object.
struct ObjTrackState {
    uint64_t handle;                                               // Object handle (value of HandleToUint64 on the Vulkan handle)
    VulkanObjectType object_type;                                  // Object type identifier
    ObjectStatusFlags status;                                      // Object state (ObjectStatusFlagBits mask)
    uint64_t parent_object;                                        // Parent object handle
    std::unique_ptr<std::unordered_set<uint64_t> > child_objects;  // Child objects (used for VkDescriptorPool only)
};
63
// Track Queue information: records which family index a VkQueue was retrieved from.
struct ObjTrackQueueInfo {
    uint32_t queue_node_index;  // Queue family index the queue belongs to
    VkQueue queue;              // The queue handle itself
};

// Map from a 64-bit object handle to its tracking state, one map per object type.
typedef std::unordered_map<uint64_t, ObjTrackState *> object_map_type;
71
// Validation object that tracks creation/destruction of Vulkan handles so it can
// detect use of invalid or wrong-device handles, allocator mismatches, and leaks.
class ObjectLifetimes : public ValidationObject {
  public:
    // Per-type live-object counters, indexed by VulkanObjectType.
    uint64_t num_objects[kVulkanObjectTypeMax + 1];
    uint64_t num_total_objects;
    // Vector of unordered_maps per object type to hold ObjTrackState info
    std::vector<object_map_type> object_map;
    // Special-case map for swapchain images
    std::unordered_map<uint64_t, ObjTrackState *> swapchainImageMap;
    // Map of queue information structures, one per queue
    std::unordered_map<VkQueue, ObjTrackQueueInfo *> queue_info_map;

    std::vector<VkQueueFamilyProperties> queue_family_properties;

    // Constructor for object lifetime tracking
    ObjectLifetimes() : num_objects{}, num_total_objects(0), object_map{} { object_map.resize(kVulkanObjectTypeMax + 1); }

    // Declarations only -- implementations live in the layer's .cpp files.
    bool DeviceReportUndestroyedObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code);
    void DeviceDestroyUndestroyedObjects(VkDevice device, VulkanObjectType object_type);
    void CreateQueue(VkDevice device, VkQueue vkObj);
    void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue);
    void ValidateQueueFlags(VkQueue queue, const char *function);
    void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                               VkCommandBufferLevel level);
    void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain);
    bool ReportUndestroyedObjects(VkDevice device, const std::string &error_code);
    void DestroyUndestroyedObjects(VkDevice device);
    bool ValidateDeviceObject(uint64_t device_handle, const char *invalid_handle_code, const char *wrong_device_code);
    void DestroyQueueDataStructures(VkDevice device);
    bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer);
    bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    bool ValidateSamplerObjects(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo);
    template <typename DispObj>
    bool ValidateDescriptorWrite(DispObj disp, VkWriteDescriptorSet const *desc, bool isPush);

    // Returns the ObjectLifetimes instance from a dispatch vector of validation
    // objects, or nullptr if the object tracker is not in the list.
    ObjectLifetimes *GetObjectLifetimeData(std::vector<ValidationObject *> &object_dispatch) {
        for (auto layer_object : object_dispatch) {
            if (layer_object->container_type == LayerObjectTypeObjectTracker) {
                return (reinterpret_cast<ObjectLifetimes *>(layer_object));
            }
        }
        return nullptr;
    };

    // Validates that 'object' of 'object_type' is known to this tracker.
    // Returns true if a validation error was logged (callers treat this as "skip"),
    // false if the object is valid, null-and-allowed, or found but not reportable.
    template <typename T1, typename T2>
    bool ValidateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, bool null_allowed,
                        const char *invalid_handle_code, const char *wrong_device_code) {
        if (null_allowed && (object == VK_NULL_HANDLE)) {
            return false;
        }
        auto object_handle = HandleToUint64(object);

        // Devices are tracked separately from the per-type maps.
        if (object_type == kVulkanObjectTypeDevice) {
            return ValidateDeviceObject(object_handle, invalid_handle_code, wrong_device_code);
        }

        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];

        // Look for object in object map
        if (object_map[object_type].find(object_handle) == object_map[object_type].end()) {
            // If object is an image, also look for it in the swapchain image map
            if ((object_type != kVulkanObjectTypeImage) || (swapchainImageMap.find(object_handle) == swapchainImageMap.end())) {
                // Object not found, look for it in other device object maps
                for (auto other_device_data : layer_data_map) {
                    for (auto layer_object_data : other_device_data.second->object_dispatch) {
                        if (layer_object_data->container_type == LayerObjectTypeObjectTracker) {
                            auto object_lifetime_data = reinterpret_cast<ObjectLifetimes *>(layer_object_data);
                            if (object_lifetime_data && (object_lifetime_data != this)) {
                                if (object_lifetime_data->object_map[object_type].find(object_handle) !=
                                        object_lifetime_data->object_map[object_type].end() ||
                                    (object_type == kVulkanObjectTypeImage &&
                                     object_lifetime_data->swapchainImageMap.find(object_handle) !=
                                         object_lifetime_data->swapchainImageMap.end())) {
                                    // Object found on other device, report an error if object has a device parent error code
                                    // NOTE(review): comparison with kVUIDUndefined is by pointer identity --
                                    // assumes callers always pass the canonical constant; confirm.
                                    if ((wrong_device_code != kVUIDUndefined) && (object_type != kVulkanObjectTypeSurfaceKHR)) {
                                        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                                       wrong_device_code,
                                                       "Object 0x%" PRIxLEAST64
                                                       " was not created, allocated or retrieved from the correct device.",
                                                       object_handle);
                                    } else {
                                        return false;
                                    }
                                }
                            }
                        }
                    }
                }
                // Report an error if object was not found anywhere
                return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, invalid_handle_code,
                               "Invalid %s Object 0x%" PRIxLEAST64 ".", object_string[object_type], object_handle);
            }
        }
        return false;
    }

    // Records creation of 'object': logs an informational message and inserts a
    // new ObjTrackState into the per-type map. No-op if the handle is already tracked.
    template <typename T1, typename T2>
    void CreateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator) {
        uint64_t object_handle = HandleToUint64(object);
        // Remember whether a custom allocator was used so the Destroy call can be
        // checked for a matching allocator (see ValidateDestroyObject).
        bool custom_allocator = (pAllocator != nullptr);
        if (!object_map[object_type].count(object_handle)) {
            VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
            log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, debug_object_type, object_handle, kVUID_ObjectTracker_Info,
                    "OBJ[0x%" PRIxLEAST64 "] : CREATE %s object 0x%" PRIxLEAST64, object_track_index++, object_string[object_type],
                    object_handle);

            // Raw owning pointer; freed in DestroyObjectSilently.
            ObjTrackState *pNewObjNode = new ObjTrackState;
            pNewObjNode->object_type = object_type;
            pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
            pNewObjNode->handle = object_handle;

            object_map[object_type][object_handle] = pNewObjNode;
            num_objects[object_type]++;
            num_total_objects++;

            // Descriptor pools additionally track their child descriptor sets.
            if (object_type == kVulkanObjectTypeDescriptorPool) {
                pNewObjNode->child_objects.reset(new std::unordered_set<uint64_t>);
            }
        }
    }

    // Removes a tracked object and decrements the counters without emitting any
    // log message. Asserts (debug builds) that the object is actually tracked.
    template <typename T1>
    void DestroyObjectSilently(T1 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        assert(object_handle != VK_NULL_HANDLE);

        auto item = object_map[object_type].find(object_handle);
        assert(item != object_map[object_type].end());

        ObjTrackState *pNode = item->second;
        assert(num_total_objects > 0);

        num_total_objects--;
        assert(num_objects[pNode->object_type] > 0);

        num_objects[pNode->object_type]--;

        // Frees the node allocated in CreateObject.
        delete pNode;
        object_map[object_type].erase(item);
    }

    // Destroy-path record hook: removes the object if (and only if) it is
    // currently tracked; unknown or null handles are ignored.
    template <typename T1, typename T2>
    void RecordDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        if (object_handle != VK_NULL_HANDLE) {
            auto item = object_map[object_type].find(object_handle);
            if (item != object_map[object_type].end()) {
                DestroyObjectSilently(object, object_type);
            }
        }
    }

    // Destroy-path validation hook: logs destruction statistics and reports an
    // error if the custom-allocator usage at Destroy does not match Create.
    // Returns true if any message contributed a "skip" result.
    template <typename T1, typename T2>
    bool ValidateDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type,
                               const VkAllocationCallbacks *pAllocator, const char *expected_custom_allocator_code,
                               const char *expected_default_allocator_code) {
        auto object_handle = HandleToUint64(object);
        bool custom_allocator = pAllocator != nullptr;
        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
        bool skip = false;

        if (object_handle != VK_NULL_HANDLE) {
            auto item = object_map[object_type].find(object_handle);
            if (item != object_map[object_type].end()) {
                ObjTrackState *pNode = item->second;
                // Counters are decremented later by DestroyObjectSilently, hence the "- 1" below.
                skip |= log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, debug_object_type, object_handle,
                                kVUID_ObjectTracker_Info,
                                "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
                                object_string[object_type], HandleToUint64(object), num_total_objects - 1,
                                num_objects[pNode->object_type] - 1, object_string[object_type]);

                auto allocated_with_custom = (pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
                if (allocated_with_custom && !custom_allocator && expected_custom_allocator_code != kVUIDUndefined) {
                    // This check only verifies that custom allocation callbacks were provided to both Create and Destroy calls,
                    // it cannot verify that these allocation callbacks are compatible with each other.
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_custom_allocator_code,
                                    "Custom allocator not specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but specified at creation.",
                                    object_string[object_type], object_handle);
                } else if (!allocated_with_custom && custom_allocator && expected_default_allocator_code != kVUIDUndefined) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_default_allocator_code,
                                    "Custom allocator specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but not specified at creation.",
                                    object_string[object_type], object_handle);
                }
            }
        }
        return skip;
    }

// Generated per-entry-point declarations are spliced into the class body here.
#include "object_tracker.h"
};