/* Copyright (c) 2015-2019 The Khronos Group Inc.
 * Copyright (c) 2015-2019 Valve Corporation
 * Copyright (c) 2015-2019 LunarG, Inc.
 * Copyright (C) 2015-2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Jon Ashburn <jon@lunarg.com>
 * Author: Tobin Ehlis <tobine@google.com>
 */
22
// Suppress unused warning on Linux
#if defined(__GNUC__)
#define DECORATE_UNUSED __attribute__((unused))
#else
#define DECORATE_UNUSED
#endif

// clang-format off
// Error-string identifiers for object-tracker checks that have no VUID assigned in the
// Vulkan spec ("UNASSIGNED-*" names). Marked unused since not every TU references them all.
static const char DECORATE_UNUSED *kVUID_ObjectTracker_Info = "UNASSIGNED-ObjectTracker-Info";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_InternalError = "UNASSIGNED-ObjectTracker-InternalError";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_ObjectLeak = "UNASSIGNED-ObjectTracker-ObjectLeak";
static const char DECORATE_UNUSED *kVUID_ObjectTracker_UnknownObject = "UNASSIGNED-ObjectTracker-UnknownObject";
// clang-format on

// Only needed for the declarations above; keep the macro from leaking into including files.
#undef DECORATE_UNUSED

// Global object-tracking index; defined in the object tracker implementation file.
extern uint64_t object_track_index;
// Object Status -- used to track state of individual objects
typedef VkFlags ObjectStatusFlags;
enum ObjectStatusFlagBits {
    OBJSTATUS_NONE = 0x00000000,                      // No status is set
    OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001,        // Fence has been submitted
    OBJSTATUS_VIEWPORT_BOUND = 0x00000002,            // Viewport state object has been bound
    OBJSTATUS_RASTER_BOUND = 0x00000004,              // Raster state object has been bound
    OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008,         // Color-blend state object has been bound
    OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010,       // Depth-stencil state object has been bound
    OBJSTATUS_GPU_MEM_MAPPED = 0x00000020,            // Memory object is currently mapped
    OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040,  // Command Buffer is of type SECONDARY
    OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000080,          // Allocated with custom allocator
};
54
// Object and state information structure -- one node per tracked Vulkan object
struct ObjTrackState {
    uint64_t handle;                                               // Object handle (as uint64)
    VulkanObjectType object_type;                                  // Object type identifier
    ObjectStatusFlags status;                                      // Object state (ObjectStatusFlagBits)
    uint64_t parent_object;                                        // Parent object
    std::unique_ptr<std::unordered_set<uint64_t> > child_objects;  // Child objects (used for VkDescriptorPool only)
};
// Track Queue information -- one record per VkQueue retrieved from a device
struct ObjTrackQueueInfo {
    uint32_t queue_node_index;  // Queue family index the queue was created from
    VkQueue queue;              // The queue handle itself
};

// Per-object-type map: object handle (uint64) -> tracking node
typedef std::unordered_map<uint64_t, ObjTrackState *> object_map_type;
// Object-lifetime validation layer object. Tracks creation/destruction of Vulkan
// handles per device so that use of unknown/destroyed handles, cross-device handle
// use, and leaked (undestroyed) objects can be reported.
class ObjectLifetimes : public ValidationObject {
  public:
    // Live-object count per object type (indexed by VulkanObjectType)
    uint64_t num_objects[kVulkanObjectTypeMax + 1];
    // Live-object count across all types
    uint64_t num_total_objects;
    // Vector of unordered_maps per object type to hold ObjTrackState info
    std::vector<object_map_type> object_map;
    // Special-case map for swapchain images
    std::unordered_map<uint64_t, ObjTrackState *> swapchainImageMap;
    // Map of queue information structures, one per queue
    std::unordered_map<VkQueue, ObjTrackQueueInfo *> queue_info_map;

    // Queue family properties recorded for queue-flag validation
    std::vector<VkQueueFamilyProperties> queue_family_properties;

    // Constructor for object lifetime tracking; sizes object_map so every object type has a slot
    ObjectLifetimes() : num_objects{}, num_total_objects(0), object_map{} { object_map.resize(kVulkanObjectTypeMax + 1); }

    // Out-of-line helpers (implemented in the object tracker source file)
    bool DeviceReportUndestroyedObjects(VkDevice device, VulkanObjectType object_type, const std::string &error_code);
    void DeviceDestroyUndestroyedObjects(VkDevice device, VulkanObjectType object_type);
    void CreateQueue(VkDevice device, VkQueue vkObj);
    void AddQueueInfo(VkDevice device, uint32_t queue_node_index, VkQueue queue);
    void ValidateQueueFlags(VkQueue queue, const char *function);
    void AllocateCommandBuffer(VkDevice device, const VkCommandPool command_pool, const VkCommandBuffer command_buffer,
                               VkCommandBufferLevel level);
    void AllocateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    void CreateSwapchainImageObject(VkDevice dispatchable_object, VkImage swapchain_image, VkSwapchainKHR swapchain);
    bool ReportUndestroyedObjects(VkDevice device, const std::string &error_code);
    void DestroyUndestroyedObjects(VkDevice device);
    bool ValidateDeviceObject(const VulkanTypedHandle &device_typed, const char *invalid_handle_code,
                              const char *wrong_device_code);
    void DestroyQueueDataStructures(VkDevice device);
    bool ValidateCommandBuffer(VkDevice device, VkCommandPool command_pool, VkCommandBuffer command_buffer);
    bool ValidateDescriptorSet(VkDevice device, VkDescriptorPool descriptor_pool, VkDescriptorSet descriptor_set);
    bool ValidateSamplerObjects(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo);
    template <typename DispObj>
    bool ValidateDescriptorWrite(DispObj disp, VkWriteDescriptorSet const *desc, bool isPush);

    // Returns the ObjectLifetimes instance registered in a dispatch vector, or nullptr if none.
    ObjectLifetimes *GetObjectLifetimeData(std::vector<ValidationObject *> &object_dispatch) {
        for (auto layer_object : object_dispatch) {
            if (layer_object->container_type == LayerObjectTypeObjectTracker) {
                // NOTE(review): reinterpret_cast from the base layer object; assumes the
                // container_type check guarantees the concrete type -- project-wide pattern.
                return (reinterpret_cast<ObjectLifetimes *>(layer_object));
            }
        }
        return nullptr;
    };

    // Validates that 'object' is known to this tracker (or, for images, is a swapchain image).
    // If not found locally, searches every other device's tracker: a hit there is reported as a
    // wrong-device error (when wrong_device_code applies); a miss everywhere is reported as an
    // invalid handle. Returns true when an error message was logged (i.e. validation failed).
    template <typename T1, typename T2>
    bool ValidateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, bool null_allowed,
                        const char *invalid_handle_code, const char *wrong_device_code) {
        // VK_NULL_HANDLE is legal for optional handles
        if (null_allowed && (object == VK_NULL_HANDLE)) {
            return false;
        }
        auto object_handle = HandleToUint64(object);

        // Devices are not stored in object_map; validate them via their own path
        if (object_type == kVulkanObjectTypeDevice) {
            return ValidateDeviceObject(VulkanTypedHandle(object, object_type), invalid_handle_code, wrong_device_code);
        }

        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];

        // Look for object in object map
        if (object_map[object_type].find(object_handle) == object_map[object_type].end()) {
            // If object is an image, also look for it in the swapchain image map
            if ((object_type != kVulkanObjectTypeImage) || (swapchainImageMap.find(object_handle) == swapchainImageMap.end())) {
                // Object not found, look for it in other device object maps
                for (auto other_device_data : layer_data_map) {
                    for (auto layer_object_data : other_device_data.second->object_dispatch) {
                        if (layer_object_data->container_type == LayerObjectTypeObjectTracker) {
                            auto object_lifetime_data = reinterpret_cast<ObjectLifetimes *>(layer_object_data);
                            if (object_lifetime_data && (object_lifetime_data != this)) {
                                if (object_lifetime_data->object_map[object_type].find(object_handle) !=
                                        object_lifetime_data->object_map[object_type].end() ||
                                    (object_type == kVulkanObjectTypeImage &&
                                     object_lifetime_data->swapchainImageMap.find(object_handle) !=
                                         object_lifetime_data->swapchainImageMap.end())) {
                                    // Object found on other device, report an error if object has a device parent error code
                                    if ((wrong_device_code != kVUIDUndefined) && (object_type != kVulkanObjectTypeSurfaceKHR)) {
                                        return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                                       wrong_device_code,
                                                       "Object 0x%" PRIxLEAST64
                                                       " was not created, allocated or retrieved from the correct device.",
                                                       object_handle);
                                    } else {
                                        // No applicable wrong-device VUID: accept silently
                                        return false;
                                    }
                                }
                            }
                        }
                    }
                }
                // Report an error if object was not found anywhere
                return log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle, invalid_handle_code,
                               "Invalid %s Object 0x%" PRIxLEAST64 ".", object_string[object_type], object_handle);
            }
        }
        return false;
    }

    // Records a newly created object in object_map and bumps the per-type and total counters.
    // Remembers whether a custom allocator was supplied so destruction can cross-check it.
    // No-op if the handle is already tracked for this type.
    template <typename T1, typename T2>
    void CreateObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type, const VkAllocationCallbacks *pAllocator) {
        uint64_t object_handle = HandleToUint64(object);
        bool custom_allocator = (pAllocator != nullptr);
        if (!object_map[object_type].count(object_handle)) {
            ObjTrackState *pNewObjNode = new ObjTrackState;
            pNewObjNode->object_type = object_type;
            pNewObjNode->status = custom_allocator ? OBJSTATUS_CUSTOM_ALLOCATOR : OBJSTATUS_NONE;
            pNewObjNode->handle = object_handle;

            object_map[object_type][object_handle] = pNewObjNode;
            num_objects[object_type]++;
            num_total_objects++;

            // Descriptor pools additionally track their child descriptor sets
            if (object_type == kVulkanObjectTypeDescriptorPool) {
                pNewObjNode->child_objects.reset(new std::unordered_set<uint64_t>);
            }
        }
    }

    // Removes a tracked object and decrements the counters without emitting any
    // validation messages. Asserts (debug builds) that the object is actually tracked.
    template <typename T1>
    void DestroyObjectSilently(T1 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        assert(object_handle != VK_NULL_HANDLE);

        auto item = object_map[object_type].find(object_handle);
        assert(item != object_map[object_type].end());

        ObjTrackState *pNode = item->second;
        assert(num_total_objects > 0);

        num_total_objects--;
        assert(num_objects[pNode->object_type] > 0);

        num_objects[pNode->object_type]--;

        delete pNode;
        object_map[object_type].erase(item);
    }

    // Destroy-time bookkeeping: removes the object from tracking if present.
    // Unlike DestroyObjectSilently, unknown or null handles are tolerated here.
    template <typename T1, typename T2>
    void RecordDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type) {
        auto object_handle = HandleToUint64(object);
        if (object_handle != VK_NULL_HANDLE) {
            auto item = object_map[object_type].find(object_handle);
            if (item != object_map[object_type].end()) {
                DestroyObjectSilently(object, object_type);
            }
        }
    }

    // Checks allocator consistency at destruction time: a custom allocator must be supplied
    // at destroy if (and only if) one was supplied at creation. Returns true ("skip") when
    // a mismatch was logged. Does not modify tracking state.
    template <typename T1, typename T2>
    bool ValidateDestroyObject(T1 dispatchable_object, T2 object, VulkanObjectType object_type,
                               const VkAllocationCallbacks *pAllocator, const char *expected_custom_allocator_code,
                               const char *expected_default_allocator_code) {
        auto object_handle = HandleToUint64(object);
        bool custom_allocator = pAllocator != nullptr;
        VkDebugReportObjectTypeEXT debug_object_type = get_debug_report_enum[object_type];
        bool skip = false;

        if (object_handle != VK_NULL_HANDLE) {
            auto item = object_map[object_type].find(object_handle);
            if (item != object_map[object_type].end()) {
                ObjTrackState *pNode = item->second;
                auto allocated_with_custom = (pNode->status & OBJSTATUS_CUSTOM_ALLOCATOR) ? true : false;
                if (allocated_with_custom && !custom_allocator && expected_custom_allocator_code != kVUIDUndefined) {
                    // This check only verifies that custom allocation callbacks were provided to both Create and Destroy calls,
                    // it cannot verify that these allocation callbacks are compatible with each other.
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_custom_allocator_code,
                                    "Custom allocator not specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but specified at creation.",
                                    object_string[object_type], object_handle);
                } else if (!allocated_with_custom && custom_allocator && expected_default_allocator_code != kVUIDUndefined) {
                    skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, debug_object_type, object_handle,
                                    expected_default_allocator_code,
                                    "Custom allocator specified while destroying %s obj 0x%" PRIxLEAST64
                                    " but not specified at creation.",
                                    object_string[object_type], object_handle);
                }
            }
        }
        return skip;
    }

// Generated per-entry-point declarations are injected into the class body here.
#include "object_tracker.h"
};