layers: Add type safety to VK_OBJECT uses

Typedef VK_OBJECT as the more type-safe VulkanTypedHandle and use the
typed form for the various container insertions. Disambiguate
uniform-initialization and initializer-list usages, given the new
constructors. Add a 64-bit-specific assert to validate that the
kVulkanObjectType* argument passed to the constructor matches the
handle's traits type.

Change-Id: I8661b68d778ae36da121c25fd80e5c44265893e0
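
For context, the sketch below shows roughly what the VulkanTypedHandle described
above could look like: a struct carrying the raw 64-bit handle plus its
VulkanObjectType, a templated constructor so call sites can pass the Vulkan handle
directly, a Cast<>() accessor, and the 64-bit-only traits assert. This is a minimal
illustration under stated assumptions, not the generated definition in the
repository; the abbreviated VulkanObjectType enum, the VkHandleTraits template, and
the UINTPTR_MAX guard are placeholders invented for this example.

    // Illustrative sketch only -- not the generated code in vk_object_types.h.
    #include <vulkan/vulkan.h>

    #include <cassert>
    #include <cstdint>

    // Abbreviated stand-in for the generated VulkanObjectType enum; the real enum
    // covers every handle type.
    enum VulkanObjectType { kVulkanObjectTypeUnknown, kVulkanObjectTypeImage, kVulkanObjectTypeBuffer };

    // HandleToUint64 already exists in the layers; a trivial version is repeated
    // here only so the sketch is self-contained.
    template <typename Handle>
    uint64_t HandleToUint64(Handle h) { return (uint64_t)h; }

    #if UINTPTR_MAX == UINT64_MAX  // stand-in guard for the repo's 64-bit handle configuration
    // Hypothetical traits mapping each distinct handle type to its enum value.
    template <typename Handle> struct VkHandleTraits;
    template <> struct VkHandleTraits<VkImage>  { static constexpr VulkanObjectType kType = kVulkanObjectTypeImage; };
    template <> struct VkHandleTraits<VkBuffer> { static constexpr VulkanObjectType kType = kVulkanObjectTypeBuffer; };
    #endif

    struct VulkanTypedHandle {
        uint64_t handle;
        VulkanObjectType type;

        template <typename Handle>
        VulkanTypedHandle(Handle h, VulkanObjectType t) : handle(HandleToUint64(h)), type(t) {
    #if UINTPTR_MAX == UINT64_MAX
            // Only 64-bit builds give every handle a distinct pointer type, so only
            // there can the enum argument be checked against the handle's traits type.
            assert(t == VkHandleTraits<Handle>::kType);
    #endif
        }

        // Recover the strongly typed handle, e.g. typed.Cast<VkImage>().
        template <typename Handle>
        Handle Cast() const { return (Handle)handle; }

        // Ordering so typed handles can key associative containers such as
        // obj_bindings and object_bindings; the real code may use a hash instead.
        bool operator<(const VulkanTypedHandle &rhs) const {
            return (handle == rhs.handle) ? (type < rhs.type) : (handle < rhs.handle);
        }
    };

Giving the struct a (handle, enum) constructor is what lets the call sites below
drop HandleToUint64() and build the typed handle in place via emplace()/insert(),
while the traits assert catches a mismatched enum argument on 64-bit builds.
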
diff --git a/layers/buffer_validation.cpp b/layers/buffer_validation.cpp
index 996fe11..f1ac0fc 100644
--- a/layers/buffer_validation.cpp
+++ b/layers/buffer_validation.cpp
@@ -1466,7 +1466,7 @@
bool CoreChecks::PreCallValidateDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
IMAGE_STATE *image_state = GetImageState(image);
- const VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
+ const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
bool skip = false;
if (image_state) {
skip |= ValidateObjectNotInUse(image_state, obj_struct, "vkDestroyImage", "VUID-vkDestroyImage-image-01000");
@@ -1477,7 +1477,7 @@
void CoreChecks::PreCallRecordDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
if (!image) return;
IMAGE_STATE *image_state = GetImageState(image);
- VK_OBJECT obj_struct = {HandleToUint64(image), kVulkanObjectTypeImage};
+ const VulkanTypedHandle obj_struct(image, kVulkanObjectTypeImage);
InvalidateCommandBuffers(image_state->cb_bindings, obj_struct);
// Clean up memory mapping, bindings and range references for image
for (auto mem_binding : image_state->GetBoundMemory()) {
@@ -1486,7 +1486,7 @@
RemoveImageMemoryRange(obj_struct.handle, mem_info);
}
}
- ClearMemoryObjectBindings(obj_struct.handle, kVulkanObjectTypeImage);
+ ClearMemoryObjectBindings(obj_struct);
EraseQFOReleaseBarriers<VkImageMemoryBarrier>(image);
// Remove image from imageMap
imageMap.erase(image);
@@ -3626,11 +3626,11 @@
// Helper function to validate correct usage bits set for buffers or images. Verify that (actual & desired) flags != 0 or, if strict
// is true, verify that (actual & desired) flags == desired
-bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, uint64_t obj_handle,
- VulkanObjectType obj_type, const char *msgCode, char const *func_name, char const *usage_str) {
+bool CoreChecks::ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle &typed_handle,
+ const char *msgCode, char const *func_name, char const *usage_str) {
bool correct_usage = false;
bool skip = false;
- const char *type_str = object_string[obj_type];
+ const char *type_str = object_string[typed_handle.type];
if (strict) {
correct_usage = ((actual & desired) == desired);
} else {
@@ -3639,14 +3639,15 @@
if (!correct_usage) {
if (msgCode == kVUIDUndefined) {
// TODO: Fix callers with kVUIDUndefined to use correct validation checks.
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle,
- kVUID_Core_MemTrack_InvalidUsageFlag,
+ skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type],
+ typed_handle.handle, kVUID_Core_MemTrack_InvalidUsageFlag,
"Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
- type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
+ type_str, report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
} else {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_type], obj_handle, msgCode,
- "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
- type_str, report_data->FormatHandle(obj_handle).c_str(), func_name, type_str, usage_str);
+ skip =
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[typed_handle.type], typed_handle.handle,
+ msgCode, "Invalid usage flag for %s %s used by %s. In this case, %s should have %s set during creation.",
+ type_str, report_data->FormatHandle(typed_handle).c_str(), func_name, type_str, usage_str);
}
}
return skip;
@@ -3656,8 +3657,8 @@
// where an error will be flagged if usage is not correct
bool CoreChecks::ValidateImageUsageFlags(IMAGE_STATE const *image_state, VkFlags desired, bool strict, const char *msgCode,
char const *func_name, char const *usage_string) {
- return ValidateUsageFlags(image_state->createInfo.usage, desired, strict, HandleToUint64(image_state->image),
- kVulkanObjectTypeImage, msgCode, func_name, usage_string);
+ return ValidateUsageFlags(image_state->createInfo.usage, desired, strict,
+ VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage), msgCode, func_name, usage_string);
}
bool CoreChecks::ValidateImageFormatFeatureFlags(IMAGE_STATE const *image_state, VkFormatFeatureFlags desired,
@@ -3715,8 +3716,8 @@
// where an error will be flagged if usage is not correct
bool CoreChecks::ValidateBufferUsageFlags(BUFFER_STATE const *buffer_state, VkFlags desired, bool strict, const char *msgCode,
char const *func_name, char const *usage_string) {
- return ValidateUsageFlags(buffer_state->createInfo.usage, desired, strict, HandleToUint64(buffer_state->buffer),
- kVulkanObjectTypeBuffer, msgCode, func_name, usage_string);
+ return ValidateUsageFlags(buffer_state->createInfo.usage, desired, strict,
+ VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), msgCode, func_name, usage_string);
}
bool CoreChecks::ValidateBufferViewRange(const BUFFER_STATE *buffer_state, const VkBufferViewCreateInfo *pCreateInfo,
@@ -4362,7 +4363,7 @@
bool CoreChecks::PreCallValidateDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
- VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
+ const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
bool skip = false;
if (image_view_state) {
@@ -4375,7 +4376,7 @@
void CoreChecks::PreCallRecordDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) {
IMAGE_VIEW_STATE *image_view_state = GetImageViewState(imageView);
if (!image_view_state) return;
- VK_OBJECT obj_struct = {HandleToUint64(imageView), kVulkanObjectTypeImageView};
+ const VulkanTypedHandle obj_struct(imageView, kVulkanObjectTypeImageView);
// Any bound cmd buffers are now invalid
InvalidateCommandBuffers(image_view_state->cb_bindings, obj_struct);
@@ -4395,7 +4396,7 @@
void CoreChecks::PreCallRecordDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
if (!buffer) return;
auto buffer_state = GetBufferState(buffer);
- VK_OBJECT obj_struct = {HandleToUint64(buffer), kVulkanObjectTypeBuffer};
+ const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
InvalidateCommandBuffers(buffer_state->cb_bindings, obj_struct);
for (auto mem_binding : buffer_state->GetBoundMemory()) {
@@ -4404,7 +4405,7 @@
RemoveBufferMemoryRange(HandleToUint64(buffer), mem_info);
}
}
- ClearMemoryObjectBindings(HandleToUint64(buffer), kVulkanObjectTypeBuffer);
+ ClearMemoryObjectBindings(obj_struct);
EraseQFOReleaseBarriers<VkBufferMemoryBarrier>(buffer);
bufferMap.erase(buffer_state->buffer);
}
@@ -4412,7 +4413,7 @@
bool CoreChecks::PreCallValidateDestroyBufferView(VkDevice device, VkBufferView bufferView,
const VkAllocationCallbacks *pAllocator) {
auto buffer_view_state = GetBufferViewState(bufferView);
- VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
+ const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
bool skip = false;
if (buffer_view_state) {
skip |= ValidateObjectNotInUse(buffer_view_state, obj_struct, "vkDestroyBufferView",
@@ -4424,7 +4425,7 @@
void CoreChecks::PreCallRecordDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) {
if (!bufferView) return;
auto buffer_view_state = GetBufferViewState(bufferView);
- VK_OBJECT obj_struct = {HandleToUint64(bufferView), kVulkanObjectTypeBufferView};
+ const VulkanTypedHandle obj_struct(bufferView, kVulkanObjectTypeBufferView);
// Any bound cmd buffers are now invalid
InvalidateCommandBuffers(buffer_view_state->cb_bindings, obj_struct);
diff --git a/layers/core_validation.cpp b/layers/core_validation.cpp
index 203e821..d6e3b37 100644
--- a/layers/core_validation.cpp
+++ b/layers/core_validation.cpp
@@ -252,12 +252,12 @@
}
// Return ptr to memory binding for given handle of specified type
-BINDABLE *CoreChecks::GetObjectMemBinding(uint64_t handle, VulkanObjectType type) {
- switch (type) {
+BINDABLE *CoreChecks::GetObjectMemBinding(const VulkanTypedHandle &typed_handle) {
+ switch (typed_handle.type) {
case kVulkanObjectTypeImage:
- return GetImageState(VkImage(handle));
+ return GetImageState(VkImage(typed_handle.handle));
case kVulkanObjectTypeBuffer:
- return GetBufferState(VkBuffer(handle));
+ return GetBufferState(VkBuffer(typed_handle.handle));
default:
break;
}
@@ -377,7 +377,7 @@
// Create binding link between given sampler and command buffer node
void CoreChecks::AddCommandBufferBindingSampler(CMD_BUFFER_STATE *cb_node, SAMPLER_STATE *sampler_state) {
- auto inserted = cb_node->object_bindings.insert({HandleToUint64(sampler_state->sampler), kVulkanObjectTypeSampler});
+ auto inserted = cb_node->object_bindings.emplace(sampler_state->sampler, kVulkanObjectTypeSampler);
if (inserted.second) {
// Only need to complete the cross-reference if this is a new item
sampler_state->cb_bindings.insert(cb_node);
@@ -389,7 +389,7 @@
// Skip validation if this image was created through WSI
if (image_state->binding.mem != MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
// First update cb binding for image
- auto image_inserted = cb_node->object_bindings.insert({HandleToUint64(image_state->image), kVulkanObjectTypeImage});
+ auto image_inserted = cb_node->object_bindings.emplace(image_state->image, kVulkanObjectTypeImage);
if (image_inserted.second) {
// Only need to continue if this is a new item (the rest of the work would have be done previous)
image_state->cb_bindings.insert(cb_node);
@@ -412,7 +412,7 @@
// Create binding link between given image view node and its image with command buffer node
void CoreChecks::AddCommandBufferBindingImageView(CMD_BUFFER_STATE *cb_node, IMAGE_VIEW_STATE *view_state) {
// First add bindings for imageView
- auto inserted = cb_node->object_bindings.insert({HandleToUint64(view_state->image_view), kVulkanObjectTypeImageView});
+ auto inserted = cb_node->object_bindings.emplace(view_state->image_view, kVulkanObjectTypeImageView);
if (inserted.second) {
// Only need to continue if this is a new item
view_state->cb_bindings.insert(cb_node);
@@ -427,7 +427,7 @@
// Create binding link between given buffer node and command buffer node
void CoreChecks::AddCommandBufferBindingBuffer(CMD_BUFFER_STATE *cb_node, BUFFER_STATE *buffer_state) {
// First update cb binding for buffer
- auto buffer_inserted = cb_node->object_bindings.insert({HandleToUint64(buffer_state->buffer), kVulkanObjectTypeBuffer});
+ auto buffer_inserted = cb_node->object_bindings.emplace(buffer_state->buffer, kVulkanObjectTypeBuffer);
if (buffer_inserted.second) {
// Only need to continue if this is a new item
buffer_state->cb_bindings.insert(cb_node);
@@ -449,7 +449,7 @@
// Create binding link between given buffer view node and its buffer with command buffer node
void CoreChecks::AddCommandBufferBindingBufferView(CMD_BUFFER_STATE *cb_node, BUFFER_VIEW_STATE *view_state) {
// First add bindings for bufferView
- auto inserted = cb_node->object_bindings.insert({HandleToUint64(view_state->buffer_view), kVulkanObjectTypeBufferView});
+ auto inserted = cb_node->object_bindings.emplace(view_state->buffer_view, kVulkanObjectTypeBufferView);
if (inserted.second) {
// Only need to complete the cross-reference if this is a new item
view_state->cb_bindings.insert(cb_node);
@@ -477,44 +477,46 @@
}
// Clear a single object binding from given memory object
-void CoreChecks::ClearMemoryObjectBinding(uint64_t handle, VulkanObjectType type, VkDeviceMemory mem) {
+void CoreChecks::ClearMemoryObjectBinding(const VulkanTypedHandle &typed_handle, VkDeviceMemory mem) {
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
// This obj is bound to a memory object. Remove the reference to this object in that memory object's list
if (mem_info) {
- mem_info->obj_bindings.erase({handle, type});
+ mem_info->obj_bindings.erase(typed_handle);
}
}
// ClearMemoryObjectBindings clears the binding of objects to memory
// For the given object it pulls the memory bindings and makes sure that the bindings
// no longer refer to the object being cleared. This occurs when objects are destroyed.
-void CoreChecks::ClearMemoryObjectBindings(uint64_t handle, VulkanObjectType type) {
- BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
+void CoreChecks::ClearMemoryObjectBindings(const VulkanTypedHandle &typed_handle) {
+ BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
if (mem_binding) {
if (!mem_binding->sparse) {
- ClearMemoryObjectBinding(handle, type, mem_binding->binding.mem);
+ ClearMemoryObjectBinding(typed_handle, mem_binding->binding.mem);
} else { // Sparse, clear all bindings
for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
- ClearMemoryObjectBinding(handle, type, sparse_mem_binding.mem);
+ ClearMemoryObjectBinding(typed_handle, sparse_mem_binding.mem);
}
}
}
}
// For given mem object, verify that it is not null or UNBOUND, if it is, report error. Return skip value.
-bool CoreChecks::VerifyBoundMemoryIsValid(VkDeviceMemory mem, uint64_t handle, const char *api_name, const char *type_name,
+bool CoreChecks::VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *api_name,
const char *error_code) {
bool result = false;
+ auto type_name = object_string[typed_handle.type];
if (VK_NULL_HANDLE == mem) {
- result = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle, error_code,
- "%s: Vk%s object %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
- api_name, type_name, report_data->FormatHandle(handle).c_str(), type_name);
- } else if (MEMORY_UNBOUND == mem) {
result =
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle, error_code,
- "%s: Vk%s object %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
- "prior to this operation.",
- api_name, type_name, report_data->FormatHandle(handle).c_str());
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle,
+ error_code, "%s: Vk%s object %s used with no memory bound. Memory should be bound by calling vkBind%sMemory().",
+ api_name, type_name, report_data->FormatHandle(typed_handle).c_str(), type_name);
+ } else if (MEMORY_UNBOUND == mem) {
+ result = log_msg(
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, typed_handle.handle, error_code,
+ "%s: Vk%s object %s used with no memory bound and previously bound memory was freed. Memory must not be freed "
+ "prior to this operation.",
+ api_name, type_name, report_data->FormatHandle(typed_handle).c_str());
}
return result;
}
@@ -523,8 +525,8 @@
bool CoreChecks::ValidateMemoryIsBoundToImage(const IMAGE_STATE *image_state, const char *api_name, const char *error_code) {
bool result = false;
if (0 == (static_cast<uint32_t>(image_state->createInfo.flags) & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)) {
- result =
- VerifyBoundMemoryIsValid(image_state->binding.mem, HandleToUint64(image_state->image), api_name, "Image", error_code);
+ result = VerifyBoundMemoryIsValid(image_state->binding.mem, VulkanTypedHandle(image_state->image, kVulkanObjectTypeImage),
+ api_name, error_code);
}
return result;
}
@@ -533,16 +535,16 @@
bool CoreChecks::ValidateMemoryIsBoundToBuffer(const BUFFER_STATE *buffer_state, const char *api_name, const char *error_code) {
bool result = false;
if (0 == (static_cast<uint32_t>(buffer_state->createInfo.flags) & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)) {
- result = VerifyBoundMemoryIsValid(buffer_state->binding.mem, HandleToUint64(buffer_state->buffer), api_name, "Buffer",
- error_code);
+ result = VerifyBoundMemoryIsValid(buffer_state->binding.mem,
+ VulkanTypedHandle(buffer_state->buffer, kVulkanObjectTypeBuffer), api_name, error_code);
}
return result;
}
// SetMemBinding is used to establish immutable, non-sparse binding between a single image/buffer object and memory object.
// Corresponding valid usage checks are in ValidateSetMemBinding().
-void CoreChecks::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset, uint64_t handle,
- VulkanObjectType type) {
+void CoreChecks::SetMemBinding(VkDeviceMemory mem, BINDABLE *mem_binding, VkDeviceSize memory_offset,
+ const VulkanTypedHandle &typed_handle) {
assert(mem_binding);
mem_binding->binding.mem = mem;
mem_binding->UpdateBoundMemorySet(); // force recreation of cached set
@@ -552,10 +554,10 @@
if (mem != VK_NULL_HANDLE) {
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
if (mem_info) {
- mem_info->obj_bindings.insert({handle, type});
+ mem_info->obj_bindings.insert(typed_handle);
// For image objects, make sure default memory state is correctly set
// TODO : What's the best/correct way to handle this?
- if (kVulkanObjectTypeImage == type) {
+ if (kVulkanObjectTypeImage == typed_handle.type) {
auto const image_state = reinterpret_cast<const IMAGE_STATE *>(mem_binding);
if (image_state) {
VkImageCreateInfo ici = image_state->createInfo;
@@ -575,43 +577,43 @@
// Otherwise, add reference from objectInfo to memoryInfo
// Add reference off of objInfo
// TODO: We may need to refactor or pass in multiple valid usage statements to handle multiple valid usage conditions.
-bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, uint64_t handle, VulkanObjectType type, const char *apiName) {
+bool CoreChecks::ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle &typed_handle, const char *apiName) {
bool skip = false;
// It's an error to bind an object to NULL memory
if (mem != VK_NULL_HANDLE) {
- BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
+ BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
assert(mem_binding);
if (mem_binding->sparse) {
const char *error_code = "VUID-vkBindImageMemory-image-01045";
const char *handle_type = "IMAGE";
- if (type == kVulkanObjectTypeBuffer) {
+ if (typed_handle.type == kVulkanObjectTypeBuffer) {
error_code = "VUID-vkBindBufferMemory-buffer-01030";
handle_type = "BUFFER";
} else {
- assert(type == kVulkanObjectTypeImage);
+ assert(typed_handle.type == kVulkanObjectTypeImage);
}
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- HandleToUint64(mem), error_code,
- "In %s, attempting to bind memory (%s) to object (%s) which was created with sparse memory flags "
- "(VK_%s_CREATE_SPARSE_*_BIT).",
- apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(handle).c_str(), handle_type);
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ HandleToUint64(mem), error_code,
+ "In %s, attempting to bind memory (%s) to object (%s) which was created with sparse memory flags "
+ "(VK_%s_CREATE_SPARSE_*_BIT).",
+ apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
+ handle_type);
}
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
if (mem_info) {
DEVICE_MEMORY_STATE *prev_binding = GetDevMemState(mem_binding->binding.mem);
if (prev_binding) {
const char *error_code = "VUID-vkBindImageMemory-image-01044";
- if (type == kVulkanObjectTypeBuffer) {
+ if (typed_handle.type == kVulkanObjectTypeBuffer) {
error_code = "VUID-vkBindBufferMemory-buffer-01029";
} else {
- assert(type == kVulkanObjectTypeImage);
+ assert(typed_handle.type == kVulkanObjectTypeImage);
}
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
HandleToUint64(mem), error_code,
"In %s, attempting to bind memory (%s) to object (%s) which has already been bound to mem object %s.",
- apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(handle).c_str(),
+ apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str(),
report_data->FormatHandle(prev_binding->mem).c_str());
} else if (mem_binding->binding.mem == MEMORY_UNBOUND) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
@@ -619,7 +621,7 @@
"In %s, attempting to bind memory (%s) to object (%s) which was previous bound to memory that has "
"since been freed. Memory bindings are immutable in "
"Vulkan so this attempt to bind to new memory is not allowed.",
- apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(handle).c_str());
+ apiName, report_data->FormatHandle(mem).c_str(), report_data->FormatHandle(typed_handle).c_str());
}
}
}
@@ -632,19 +634,19 @@
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
-bool CoreChecks::SetSparseMemBinding(MEM_BINDING binding, uint64_t handle, VulkanObjectType type) {
+bool CoreChecks::SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle &typed_handle) {
bool skip = VK_FALSE;
// Handle NULL case separately, just clear previous binding & decrement reference
if (binding.mem == VK_NULL_HANDLE) {
// TODO : This should cause the range of the resource to be unbound according to spec
} else {
- BINDABLE *mem_binding = GetObjectMemBinding(handle, type);
+ BINDABLE *mem_binding = GetObjectMemBinding(typed_handle);
assert(mem_binding);
if (mem_binding) { // Invalid handles are reported by object tracker, but Get returns NULL for them, so avoid SEGV here
assert(mem_binding->sparse);
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(binding.mem);
if (mem_info) {
- mem_info->obj_bindings.insert({handle, type});
+ mem_info->obj_bindings.insert(typed_handle);
// Need to set mem binding for this object
mem_binding->sparse_bindings.insert(binding);
mem_binding->UpdateBoundMemorySet();
@@ -1968,7 +1970,7 @@
HandleToUint64(cb_state->commandBuffer), kVUID_Core_DrawState_InvalidCommandBuffer,
"You are adding %s to command buffer %s that is invalid because bound %s %s was %s.", call_source,
report_data->FormatHandle(cb_state->commandBuffer).c_str(), type_str,
- report_data->FormatHandle(obj.handle).c_str(), cause_str);
+ report_data->FormatHandle(obj).c_str(), cause_str);
}
return skip;
}
@@ -2166,7 +2168,7 @@
pCB->primaryCommandBuffer = VK_NULL_HANDLE;
// If secondary, invalidate any primary command buffer that may call us.
if (pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
- InvalidateCommandBuffers(pCB->linkedCommandBuffers, {HandleToUint64(cb), kVulkanObjectTypeCommandBuffer});
+ InvalidateCommandBuffers(pCB->linkedCommandBuffers, VulkanTypedHandle(cb, kVulkanObjectTypeCommandBuffer));
}
// Remove reverse command buffer links.
@@ -2892,7 +2894,7 @@
}
// Check that the queue family index of 'queue' matches one of the entries in pQueueFamilyIndices
-bool CoreChecks::ValidImageBufferQueue(CMD_BUFFER_STATE *cb_node, const VK_OBJECT *object, VkQueue queue, uint32_t count,
+bool CoreChecks::ValidImageBufferQueue(CMD_BUFFER_STATE *cb_node, const VK_OBJECT &object, VkQueue queue, uint32_t count,
const uint32_t *indices) {
bool found = false;
bool skip = false;
@@ -2906,12 +2908,12 @@
}
if (!found) {
- skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object->type], object->handle,
+ skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[object.type], object.handle,
kVUID_Core_DrawState_InvalidQueueFamily,
"vkQueueSubmit: Command buffer %s contains %s %s which was not created allowing concurrent access to "
"this queue family %d.",
- report_data->FormatHandle(cb_node->commandBuffer).c_str(), object_string[object->type],
- report_data->FormatHandle(object->handle).c_str(), queue_state->queueFamilyIndex);
+ report_data->FormatHandle(cb_node->commandBuffer).c_str(), object_string[object.type],
+ report_data->FormatHandle(object).c_str(), queue_state->queueFamilyIndex);
}
}
return skip;
@@ -2935,17 +2937,17 @@
}
// Ensure that any bound images or buffers created with SHARING_MODE_CONCURRENT have access to the current queue family
- for (auto object : pCB->object_bindings) {
+ for (const auto &object : pCB->object_bindings) {
if (object.type == kVulkanObjectTypeImage) {
- auto image_state = GetImageState(reinterpret_cast<VkImage &>(object.handle));
+ auto image_state = GetImageState(object.Cast<VkImage>());
if (image_state && image_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
- skip |= ValidImageBufferQueue(pCB, &object, queue, image_state->createInfo.queueFamilyIndexCount,
+ skip |= ValidImageBufferQueue(pCB, object, queue, image_state->createInfo.queueFamilyIndexCount,
image_state->createInfo.pQueueFamilyIndices);
}
} else if (object.type == kVulkanObjectTypeBuffer) {
- auto buffer_state = GetBufferState(reinterpret_cast<VkBuffer &>(object.handle));
+ auto buffer_state = GetBufferState(object.Cast<VkBuffer>());
if (buffer_state && buffer_state->createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) {
- skip |= ValidImageBufferQueue(pCB, &object, queue, buffer_state->createInfo.queueFamilyIndexCount,
+ skip |= ValidImageBufferQueue(pCB, object, queue, buffer_state->createInfo.queueFamilyIndexCount,
buffer_state->createInfo.pQueueFamilyIndices);
}
}
@@ -3750,21 +3752,21 @@
}
// For given obj node, if it is use, flag a validation error and return callback result, else return false
-bool CoreChecks::ValidateObjectNotInUse(BASE_NODE *obj_node, VK_OBJECT obj_struct, const char *caller_name,
+bool CoreChecks::ValidateObjectNotInUse(BASE_NODE *obj_node, const VK_OBJECT &obj_struct, const char *caller_name,
const char *error_code) {
if (disabled.object_in_use) return false;
bool skip = false;
if (obj_node->in_use.load()) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, get_debug_report_enum[obj_struct.type], obj_struct.handle,
error_code, "Cannot call %s on %s %s that is currently in use by a command buffer.", caller_name,
- object_string[obj_struct.type], report_data->FormatHandle(obj_struct.handle).c_str());
+ object_string[obj_struct.type], report_data->FormatHandle(obj_struct).c_str());
}
return skip;
}
bool CoreChecks::PreCallValidateFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
- VK_OBJECT obj_struct = {HandleToUint64(mem), kVulkanObjectTypeDeviceMemory};
+ const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
bool skip = false;
if (mem_info) {
skip |= ValidateObjectNotInUse(mem_info, obj_struct, "vkFreeMemory", "VUID-vkFreeMemory-memory-00677");
@@ -3775,17 +3777,20 @@
void CoreChecks::PreCallRecordFreeMemory(VkDevice device, VkDeviceMemory mem, const VkAllocationCallbacks *pAllocator) {
if (!mem) return;
DEVICE_MEMORY_STATE *mem_info = GetDevMemState(mem);
- VK_OBJECT obj_struct = {HandleToUint64(mem), kVulkanObjectTypeDeviceMemory};
+ const VulkanTypedHandle obj_struct(mem, kVulkanObjectTypeDeviceMemory);
// Clear mem binding for any bound objects
- for (auto obj : mem_info->obj_bindings) {
+ for (const auto &obj : mem_info->obj_bindings) {
+ log_msg(report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, get_debug_report_enum[obj.type], obj.handle,
+ kVUID_Core_MemTrack_FreedMemRef, "VK Object %s still has a reference to mem obj %s.",
+ report_data->FormatHandle(obj).c_str(), report_data->FormatHandle(mem_info->mem).c_str());
BINDABLE *bindable_state = nullptr;
switch (obj.type) {
case kVulkanObjectTypeImage:
- bindable_state = GetImageState(reinterpret_cast<VkImage &>(obj.handle));
+ bindable_state = GetImageState(obj.Cast<VkImage>());
break;
case kVulkanObjectTypeBuffer:
- bindable_state = GetBufferState(reinterpret_cast<VkBuffer &>(obj.handle));
+ bindable_state = GetBufferState(obj.Cast<VkBuffer>());
break;
default:
// Should only have buffer or image objects bound to memory
@@ -4046,7 +4051,7 @@
bool CoreChecks::PreCallValidateDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
- VK_OBJECT obj_struct = {HandleToUint64(semaphore), kVulkanObjectTypeSemaphore};
+ const VulkanTypedHandle obj_struct(semaphore, kVulkanObjectTypeSemaphore);
bool skip = false;
if (sema_node) {
skip |= ValidateObjectNotInUse(sema_node, obj_struct, "vkDestroySemaphore", "VUID-vkDestroySemaphore-semaphore-01137");
@@ -4061,7 +4066,7 @@
bool CoreChecks::PreCallValidateDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
EVENT_STATE *event_state = GetEventState(event);
- VK_OBJECT obj_struct = {HandleToUint64(event), kVulkanObjectTypeEvent};
+ const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
bool skip = false;
if (event_state) {
skip |= ValidateObjectNotInUse(event_state, obj_struct, "vkDestroyEvent", "VUID-vkDestroyEvent-event-01145");
@@ -4072,7 +4077,7 @@
void CoreChecks::PreCallRecordDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) {
if (!event) return;
EVENT_STATE *event_state = GetEventState(event);
- VK_OBJECT obj_struct = {HandleToUint64(event), kVulkanObjectTypeEvent};
+ const VulkanTypedHandle obj_struct(event, kVulkanObjectTypeEvent);
InvalidateCommandBuffers(event_state->cb_bindings, obj_struct);
eventMap.erase(event);
}
@@ -4080,7 +4085,7 @@
bool CoreChecks::PreCallValidateDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
if (disabled.query_validation) return false;
QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
- VK_OBJECT obj_struct = {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool};
+ const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
bool skip = false;
if (qp_state) {
skip |= ValidateObjectNotInUse(qp_state, obj_struct, "vkDestroyQueryPool", "VUID-vkDestroyQueryPool-queryPool-00793");
@@ -4091,7 +4096,7 @@
void CoreChecks::PreCallRecordDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) {
if (!queryPool) return;
QUERY_POOL_STATE *qp_state = GetQueryPoolState(queryPool);
- VK_OBJECT obj_struct = {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool};
+ const VulkanTypedHandle obj_struct(queryPool, kVulkanObjectTypeQueryPool);
InvalidateCommandBuffers(qp_state->cb_bindings, obj_struct);
queryPoolMap.erase(queryPool);
}
@@ -4180,8 +4185,8 @@
"%s %s %s is aliased with %s %s %s which may indicate a bug. For further info refer to the Buffer-Image Granularity "
"section of the Vulkan specification. "
"(https://www.khronos.org/registry/vulkan/specs/1.0-extensions/xhtml/vkspec.html#resources-bufferimagegranularity)",
- r1_linear_str, r1_type_str, report_data->FormatHandle(range1->handle).c_str(), r2_linear_str, r2_type_str,
- report_data->FormatHandle(range2->handle).c_str());
+ r1_linear_str, r1_type_str, report_data->FormatHandle(MemoryRangeTypedHandle(*range1)).c_str(), r2_linear_str,
+ r2_type_str, report_data->FormatHandle(MemoryRangeTypedHandle(*range2)).c_str());
}
// Ranges intersect
return true;
@@ -4230,8 +4235,9 @@
HandleToUint64(mem_info->mem), error_code,
"In %s, attempting to bind memory (%s) to object (%s), memoryOffset=0x%" PRIxLEAST64
" must be less than the memory allocation size 0x%" PRIxLEAST64 ".",
- api_name, report_data->FormatHandle(mem_info->mem).c_str(), report_data->FormatHandle(handle).c_str(),
- memoryOffset, mem_info->alloc_info.allocationSize);
+ api_name, report_data->FormatHandle(mem_info->mem).c_str(),
+ report_data->FormatHandle(MemoryRangeTypedHandle(range)).c_str(), memoryOffset,
+ mem_info->alloc_info.allocationSize);
}
return skip;
@@ -4343,7 +4349,8 @@
if (buffer_state) {
// Track objects tied to memory
uint64_t buffer_handle = HandleToUint64(buffer);
- skip = ValidateSetMemBinding(mem, buffer_handle, kVulkanObjectTypeBuffer, api_name);
+ const VulkanTypedHandle obj_struct(buffer, kVulkanObjectTypeBuffer);
+ skip = ValidateSetMemBinding(mem, obj_struct, api_name);
if (!buffer_state->memory_requirements_checked) {
// There's not an explicit requirement in the spec to call vkGetBufferMemoryRequirements() prior to calling
// BindBufferMemory, but it's implied in that memory being bound must conform with VkMemoryRequirements from
@@ -4352,7 +4359,7 @@
log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, buffer_handle,
kVUID_Core_DrawState_InvalidBuffer,
"%s: Binding memory to buffer %s but vkGetBufferMemoryRequirements() has not been called on that buffer.",
- api_name, report_data->FormatHandle(buffer_handle).c_str());
+ api_name, report_data->FormatHandle(buffer).c_str());
// Make the call for them so we can verify the state
DispatchGetBufferMemoryRequirements(device, buffer, &buffer_state->requirements);
}
@@ -4399,7 +4406,7 @@
"to buffer %s and memoryOffset 0x%" PRIxLEAST64 " must be zero.",
api_name, report_data->FormatHandle(mem).c_str(),
report_data->FormatHandle(mem_info->dedicated_buffer).c_str(),
- report_data->FormatHandle(buffer_handle).c_str(), memoryOffset);
+ report_data->FormatHandle(buffer).c_str(), memoryOffset);
}
}
}
@@ -4420,8 +4427,7 @@
InsertBufferMemoryRange(buffer, mem_info, memoryOffset, buffer_state->requirements);
}
// Track objects tied to memory
- uint64_t buffer_handle = HandleToUint64(buffer);
- SetMemBinding(mem, buffer_state, memoryOffset, buffer_handle, kVulkanObjectTypeBuffer);
+ SetMemBinding(mem, buffer_state, memoryOffset, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer));
}
}
@@ -4601,7 +4607,7 @@
bool CoreChecks::PreCallValidateDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
- VK_OBJECT obj_struct = {HandleToUint64(pipeline), kVulkanObjectTypePipeline};
+ const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
bool skip = false;
if (pipeline_state) {
skip |= ValidateObjectNotInUse(pipeline_state, obj_struct, "vkDestroyPipeline", "VUID-vkDestroyPipeline-pipeline-00765");
@@ -4612,7 +4618,7 @@
void CoreChecks::PreCallRecordDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) {
if (!pipeline) return;
PIPELINE_STATE *pipeline_state = GetPipelineState(pipeline);
- VK_OBJECT obj_struct = {HandleToUint64(pipeline), kVulkanObjectTypePipeline};
+ const VulkanTypedHandle obj_struct(pipeline, kVulkanObjectTypePipeline);
// Any bound cmd buffers are now invalid
InvalidateCommandBuffers(pipeline_state->cb_bindings, obj_struct);
if (enabled.gpu_validation) {
@@ -4629,7 +4635,7 @@
bool CoreChecks::PreCallValidateDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
- VK_OBJECT obj_struct = {HandleToUint64(sampler), kVulkanObjectTypeSampler};
+ const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
bool skip = false;
if (sampler_state) {
skip |= ValidateObjectNotInUse(sampler_state, obj_struct, "vkDestroySampler", "VUID-vkDestroySampler-sampler-01082");
@@ -4640,7 +4646,7 @@
void CoreChecks::PreCallRecordDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) {
if (!sampler) return;
SAMPLER_STATE *sampler_state = GetSamplerState(sampler);
- VK_OBJECT obj_struct = {HandleToUint64(sampler), kVulkanObjectTypeSampler};
+ const VulkanTypedHandle obj_struct(sampler, kVulkanObjectTypeSampler);
// Any bound cmd buffers are now invalid
if (sampler_state) {
InvalidateCommandBuffers(sampler_state->cb_bindings, obj_struct);
@@ -4661,7 +4667,7 @@
bool CoreChecks::PreCallValidateDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
const VkAllocationCallbacks *pAllocator) {
DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
- VK_OBJECT obj_struct = {HandleToUint64(descriptorPool), kVulkanObjectTypeDescriptorPool};
+ const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
bool skip = false;
if (desc_pool_state) {
skip |= ValidateObjectNotInUse(desc_pool_state, obj_struct, "vkDestroyDescriptorPool",
@@ -4674,7 +4680,7 @@
const VkAllocationCallbacks *pAllocator) {
if (!descriptorPool) return;
DESCRIPTOR_POOL_STATE *desc_pool_state = GetDescriptorPoolState(descriptorPool);
- VK_OBJECT obj_struct = {HandleToUint64(descriptorPool), kVulkanObjectTypeDescriptorPool};
+ const VulkanTypedHandle obj_struct(descriptorPool, kVulkanObjectTypeDescriptorPool);
if (desc_pool_state) {
// Any bound cmd buffers are now invalid
InvalidateCommandBuffers(desc_pool_state->cb_bindings, obj_struct);
@@ -4882,7 +4888,7 @@
bool CoreChecks::PreCallValidateDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
const VkAllocationCallbacks *pAllocator) {
FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
- VK_OBJECT obj_struct = {HandleToUint64(framebuffer), kVulkanObjectTypeFramebuffer};
+ const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
bool skip = false;
if (framebuffer_state) {
skip |= ValidateObjectNotInUse(framebuffer_state, obj_struct, "vkDestroyFramebuffer",
@@ -4895,7 +4901,7 @@
const VkAllocationCallbacks *pAllocator) {
if (!framebuffer) return;
FRAMEBUFFER_STATE *framebuffer_state = GetFramebufferState(framebuffer);
- VK_OBJECT obj_struct = {HandleToUint64(framebuffer), kVulkanObjectTypeFramebuffer};
+ const VulkanTypedHandle obj_struct(framebuffer, kVulkanObjectTypeFramebuffer);
InvalidateCommandBuffers(framebuffer_state->cb_bindings, obj_struct);
frameBufferMap.erase(framebuffer);
}
@@ -4903,7 +4909,7 @@
bool CoreChecks::PreCallValidateDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
const VkAllocationCallbacks *pAllocator) {
RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
- VK_OBJECT obj_struct = {HandleToUint64(renderPass), kVulkanObjectTypeRenderPass};
+ const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
bool skip = false;
if (rp_state) {
skip |= ValidateObjectNotInUse(rp_state, obj_struct, "vkDestroyRenderPass", "VUID-vkDestroyRenderPass-renderPass-00873");
@@ -4914,7 +4920,7 @@
void CoreChecks::PreCallRecordDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) {
if (!renderPass) return;
RENDER_PASS_STATE *rp_state = GetRenderPassState(renderPass);
- VK_OBJECT obj_struct = {HandleToUint64(renderPass), kVulkanObjectTypeRenderPass};
+ const VulkanTypedHandle obj_struct(renderPass, kVulkanObjectTypeRenderPass);
InvalidateCommandBuffers(rp_state->cb_bindings, obj_struct);
renderPassMap.erase(renderPass);
}
@@ -6064,7 +6070,7 @@
// Add bindings between the given cmd buffer & framebuffer and the framebuffer's children
void CoreChecks::AddFramebufferBinding(CMD_BUFFER_STATE *cb_state, FRAMEBUFFER_STATE *fb_state) {
- AddCommandBufferBinding(&fb_state->cb_bindings, {HandleToUint64(fb_state->framebuffer), kVulkanObjectTypeFramebuffer},
+ AddCommandBufferBinding(&fb_state->cb_bindings, VulkanTypedHandle(fb_state->framebuffer, kVulkanObjectTypeFramebuffer),
cb_state);
const uint32_t attachmentCount = fb_state->createInfo.attachmentCount;
@@ -6334,7 +6340,7 @@
}
cb_state->lastBound[pipelineBindPoint].pipeline_state = pipe_state;
SetPipelineState(pipe_state);
- AddCommandBufferBinding(&pipe_state->cb_bindings, {HandleToUint64(pipeline), kVulkanObjectTypePipeline}, cb_state);
+ AddCommandBufferBinding(&pipe_state->cb_bindings, VulkanTypedHandle(pipeline, kVulkanObjectTypePipeline), cb_state);
}
bool CoreChecks::PreCallValidateCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
@@ -6927,10 +6933,10 @@
if (0 == (qfp.queueFlags & flag_mask.at(bind_point))) {
const std::string &error = bind_errors.at(bind_point);
auto cb_u64 = HandleToUint64(cb_state->commandBuffer);
- auto cp_u64 = HandleToUint64(cb_state->createInfo.commandPool);
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, cb_u64,
error, "%s: CommandBuffer %s was allocated from VkCommandPool %s that does not support bindpoint %s.",
- func_name, report_data->FormatHandle(cb_u64).c_str(), report_data->FormatHandle(cp_u64).c_str(),
+ func_name, report_data->FormatHandle(cb_state->commandBuffer).c_str(),
+ report_data->FormatHandle(cb_state->createInfo.commandPool).c_str(),
string_VkPipelineBindPoint(bind_point));
}
}
@@ -6968,7 +6974,7 @@
layout_u64, "VUID-vkCmdPushDescriptorSetKHR-set-00365",
"%s: Set index %" PRIu32
" does not match push descriptor set layout index for VkPipelineLayout %s.",
- func_name, set, report_data->FormatHandle(layout_u64).c_str());
+ func_name, set, report_data->FormatHandle(layout).c_str());
} else {
// Create an empty proxy in order to use the existing descriptor set update validation
// TODO move the validation (like this) that doesn't need descriptor set state to the DSL object so we
@@ -6981,7 +6987,7 @@
skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, layout_u64,
"VUID-vkCmdPushDescriptorSetKHR-set-00364",
"%s: Set index %" PRIu32 " is outside of range for VkPipelineLayout %s (set < %" PRIu32 ").", func_name,
- set, report_data->FormatHandle(layout_u64).c_str(), static_cast<uint32_t>(set_layouts.size()));
+ set, report_data->FormatHandle(layout).c_str(), static_cast<uint32_t>(set_layouts.size()));
}
}
@@ -7185,7 +7191,7 @@
CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
auto event_state = GetEventState(event);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(event), kVulkanObjectTypeEvent}, cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->events.push_back(event);
@@ -7213,7 +7219,7 @@
CMD_BUFFER_STATE *cb_state = GetCBState(commandBuffer);
auto event_state = GetEventState(event);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(event), kVulkanObjectTypeEvent}, cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(event, kVulkanObjectTypeEvent), cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->events.push_back(event);
@@ -7332,7 +7338,8 @@
// Verify image barrier image state and that the image is consistent with FB image
bool CoreChecks::ValidateImageBarrierImage(const char *funcName, CMD_BUFFER_STATE const *cb_state, VkFramebuffer framebuffer,
uint32_t active_subpass, const safe_VkSubpassDescription2KHR &sub_desc,
- uint64_t rp_handle, uint32_t img_index, const VkImageMemoryBarrier &img_barrier) {
+ const VulkanTypedHandle &rp_handle, uint32_t img_index,
+ const VkImageMemoryBarrier &img_barrier) {
bool skip = false;
const auto &fb_state = GetFramebufferState(framebuffer);
assert(fb_state);
@@ -7376,8 +7383,8 @@
}
}
if (!sub_image_found) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-image-02635",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-image-02635",
"%s: Barrier pImageMemoryBarriers[%d].image (%s) is not referenced by the VkSubpassDescription for "
"active subpass (%d) of current renderPass (%s).",
funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
@@ -7389,7 +7396,7 @@
report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, fb_handle,
"VUID-vkCmdPipelineBarrier-image-02635",
"%s: Barrier pImageMemoryBarriers[%d].image (%s) does not match an image from the current framebuffer (%s).", funcName,
- img_index, report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_handle).c_str());
+ img_index, report_data->FormatHandle(img_bar_image).c_str(), report_data->FormatHandle(fb_state->framebuffer).c_str());
}
if (img_barrier.oldLayout != img_barrier.newLayout) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
@@ -7400,8 +7407,8 @@
string_VkImageLayout(img_barrier.newLayout));
} else {
if (sub_image_found && sub_image_layout != img_barrier.oldLayout) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-oldLayout-02636",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-oldLayout-02636",
"%s: Barrier pImageMemoryBarriers[%d].image (%s) is referenced by the VkSubpassDescription for active "
"subpass (%d) of current renderPass (%s) as having layout %s, but image barrier has layout %s.",
funcName, img_index, report_data->FormatHandle(img_bar_image).c_str(), active_subpass,
@@ -7414,7 +7421,7 @@
// Validate image barriers within a renderPass
bool CoreChecks::ValidateRenderPassImageBarriers(const char *funcName, CMD_BUFFER_STATE *cb_state, uint32_t active_subpass,
- const safe_VkSubpassDescription2KHR &sub_desc, uint64_t rp_handle,
+ const safe_VkSubpassDescription2KHR &sub_desc, const VulkanTypedHandle &rp_handle,
const safe_VkSubpassDependency2KHR *dependencies,
const std::vector<uint32_t> &self_dependencies, uint32_t image_mem_barrier_count,
const VkImageMemoryBarrier *image_barriers) {
@@ -7434,14 +7441,14 @@
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pImageMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency "
"srcAccessMask of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
funcName, i, img_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pImageMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency "
"dstAccessMask of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
@@ -7450,8 +7457,8 @@
}
if (VK_QUEUE_FAMILY_IGNORED != img_barrier.srcQueueFamilyIndex ||
VK_QUEUE_FAMILY_IGNORED != img_barrier.dstQueueFamilyIndex) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-srcQueueFamilyIndex-01182",
"%s: Barrier pImageMemoryBarriers[%d].srcQueueFamilyIndex is %d and "
"pImageMemoryBarriers[%d].dstQueueFamilyIndex is %d but both must be VK_QUEUE_FAMILY_IGNORED.",
funcName, i, img_barrier.srcQueueFamilyIndex, i, img_barrier.dstQueueFamilyIndex);
@@ -7482,11 +7489,11 @@
bool skip = false;
const auto rp_state = cb_state->activeRenderPass;
const auto active_subpass = cb_state->activeSubpass;
- auto rp_handle = HandleToUint64(rp_state->renderPass);
+ const VulkanTypedHandle rp_handle(rp_state->renderPass, kVulkanObjectTypeRenderPass);
const auto &self_dependencies = rp_state->self_dependencies[active_subpass];
const auto &dependencies = rp_state->createInfo.pDependencies;
if (self_dependencies.size() == 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barriers cannot be set during subpass %d of renderPass %s with no self-dependency specified.",
funcName, active_subpass, report_data->FormatHandle(rp_handle).c_str());
@@ -7508,15 +7515,15 @@
if (!stage_mask_match) {
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier srcStageMask(0x%X) is not a subset of VkSubpassDependency srcStageMask of any "
"self-dependency of subpass %d of renderPass %s for which dstStageMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
funcName, src_stage_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-pDependencies-02285",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier dstStageMask(0x%X) is not a subset of VkSubpassDependency dstStageMask of any "
"self-dependency of subpass %d of renderPass %s for which srcStageMask is also a subset. "
"Candidate VkSubpassDependency are pDependencies entries [%s].",
@@ -7525,8 +7532,8 @@
}
if (0 != buffer_mem_barrier_count) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
- "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ rp_handle.handle, "VUID-vkCmdPipelineBarrier-bufferMemoryBarrierCount-01178",
"%s: bufferMemoryBarrierCount is non-zero (%d) for subpass %d of renderPass %s.", funcName,
buffer_mem_barrier_count, active_subpass, report_data->FormatHandle(rp_handle).c_str());
}
@@ -7545,7 +7552,7 @@
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pMemoryBarriers[%d].srcAccessMask(0x%X) is not a subset of VkSubpassDependency srcAccessMask "
"for any self-dependency of subpass %d of renderPass %s for which dstAccessMask is also a subset. "
@@ -7553,7 +7560,7 @@
funcName, i, mb_src_access_mask, active_subpass, report_data->FormatHandle(rp_handle).c_str(),
self_dep_ss.str().c_str());
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: Barrier pMemoryBarriers[%d].dstAccessMask(0x%X) is not a subset of VkSubpassDependency dstAccessMask "
"for any self-dependency of subpass %d of renderPass %s for which srcAccessMask is also a subset. "
@@ -7576,7 +7583,7 @@
std::stringstream self_dep_ss;
stream_join(self_dep_ss, ", ", self_dependencies);
skip |= log_msg(
- report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle,
+ report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, rp_handle.handle,
"VUID-vkCmdPipelineBarrier-pDependencies-02285",
"%s: dependencyFlags param (0x%X) does not equal VkSubpassDependency dependencyFlags value for any "
"self-dependency of subpass %d of renderPass %s. Candidate VkSubpassDependency are pDependencies entries [%s].",
@@ -8219,7 +8226,7 @@
for (uint32_t i = 0; i < eventCount; ++i) {
auto event_state = GetEventState(pEvents[i]);
if (event_state) {
- AddCommandBufferBinding(&event_state->cb_bindings, {HandleToUint64(pEvents[i]), kVulkanObjectTypeEvent}, cb_state);
+ AddCommandBufferBinding(&event_state->cb_bindings, VulkanTypedHandle(pEvents[i], kVulkanObjectTypeEvent), cb_state);
event_state->cb_bindings.insert(cb_state);
}
cb_state->waitedEvents.insert(pEvents[i]);
@@ -8355,7 +8362,7 @@
cb_state->activeQueries.insert(query_obj);
cb_state->startedQueries.insert(query_obj);
AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
- {HandleToUint64(query_obj.pool), kVulkanObjectTypeQueryPool}, cb_state);
+ VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
}
bool CoreChecks::PreCallValidateCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags) {
@@ -8402,7 +8409,7 @@
cb_state->activeQueries.erase(query_obj);
cb_state->queryUpdates.emplace_back([=](VkQueue q) { return SetQueryState(q, cb_state->commandBuffer, query_obj, true); });
AddCommandBufferBinding(&GetQueryPoolState(query_obj.pool)->cb_bindings,
- {HandleToUint64(query_obj.pool), kVulkanObjectTypeQueryPool}, cb_state);
+ VulkanTypedHandle(query_obj.pool, kVulkanObjectTypeQueryPool), cb_state);
}
void CoreChecks::PostCallRecordCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
@@ -8432,7 +8439,7 @@
cb_state->waitedEventsBeforeQueryReset[query] = cb_state->waitedEvents;
cb_state->queryUpdates.emplace_back([=](VkQueue q) { return SetQueryState(q, commandBuffer, query, false); });
}
- AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
+ AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
cb_state);
}
@@ -8493,7 +8500,7 @@
auto dst_buff_state = GetBufferState(dstBuffer);
AddCommandBufferBindingBuffer(cb_state, dst_buff_state);
cb_state->queryUpdates.emplace_back([=](VkQueue q) { return ValidateQuery(q, cb_state, queryPool, firstQuery, queryCount); });
- AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, {HandleToUint64(queryPool), kVulkanObjectTypeQueryPool},
+ AddCommandBufferBinding(&GetQueryPoolState(queryPool)->cb_bindings, VulkanTypedHandle(queryPool, kVulkanObjectTypeQueryPool),
cb_state);
}
@@ -9966,7 +9973,7 @@
AddFramebufferBinding(cb_state, framebuffer);
// Connect this RP to cmdBuffer
AddCommandBufferBinding(&render_pass_state->cb_bindings,
- {HandleToUint64(render_pass_state->renderPass), kVulkanObjectTypeRenderPass}, cb_state);
+ VulkanTypedHandle(render_pass_state->renderPass, kVulkanObjectTypeRenderPass), cb_state);
// transition attachments to the correct layouts for beginning of renderPass and first subpass
TransitionBeginRenderPassLayouts(cb_state, render_pass_state, framebuffer);
@@ -10560,7 +10567,7 @@
if (image_state) {
// Track objects tied to memory
uint64_t image_handle = HandleToUint64(image);
- skip = ValidateSetMemBinding(mem, image_handle, kVulkanObjectTypeImage, api_name);
+ skip = ValidateSetMemBinding(mem, VulkanTypedHandle(image, kVulkanObjectTypeImage), api_name);
#ifdef VK_USE_PLATFORM_ANDROID_KHR
if (image_state->external_format_android) {
if (image_state->memory_requirements_checked) {
@@ -10650,8 +10657,7 @@
}
// Track objects tied to memory
- uint64_t image_handle = HandleToUint64(image);
- SetMemBinding(mem, image_state, memoryOffset, image_handle, kVulkanObjectTypeImage);
+ SetMemBinding(mem, image_state, memoryOffset, VulkanTypedHandle(image, kVulkanObjectTypeImage));
}
}
@@ -10890,14 +10896,14 @@
for (uint32_t k = 0; k < bindInfo.pBufferBinds[j].bindCount; k++) {
auto sparse_binding = bindInfo.pBufferBinds[j].pBinds[k];
SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
- HandleToUint64(bindInfo.pBufferBinds[j].buffer), kVulkanObjectTypeBuffer);
+ VulkanTypedHandle(bindInfo.pBufferBinds[j].buffer, kVulkanObjectTypeBuffer));
}
}
for (uint32_t j = 0; j < bindInfo.imageOpaqueBindCount; j++) {
for (uint32_t k = 0; k < bindInfo.pImageOpaqueBinds[j].bindCount; k++) {
auto sparse_binding = bindInfo.pImageOpaqueBinds[j].pBinds[k];
SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, sparse_binding.size},
- HandleToUint64(bindInfo.pImageOpaqueBinds[j].image), kVulkanObjectTypeImage);
+ VulkanTypedHandle(bindInfo.pImageOpaqueBinds[j].image, kVulkanObjectTypeImage));
}
}
for (uint32_t j = 0; j < bindInfo.imageBindCount; j++) {
@@ -10906,7 +10912,7 @@
// TODO: This size is broken for non-opaque bindings, need to update to comprehend full sparse binding data
VkDeviceSize size = sparse_binding.extent.depth * sparse_binding.extent.height * sparse_binding.extent.width * 4;
SetSparseMemBinding({sparse_binding.memory, sparse_binding.memoryOffset, size},
- HandleToUint64(bindInfo.pImageBinds[j].image), kVulkanObjectTypeImage);
+ VulkanTypedHandle(bindInfo.pImageBinds[j].image, kVulkanObjectTypeImage));
}
}
@@ -10982,7 +10988,7 @@
bool skip = false;
SEMAPHORE_STATE *sema_node = GetSemaphoreState(semaphore);
if (sema_node) {
- VK_OBJECT obj_struct = {HandleToUint64(semaphore), kVulkanObjectTypeSemaphore};
+ VK_OBJECT obj_struct(semaphore, kVulkanObjectTypeSemaphore);
skip |= ValidateObjectNotInUse(sema_node, obj_struct, caller_name, kVUIDUndefined);
}
return skip;
@@ -11498,7 +11504,7 @@
}
imageSubresourceMap.erase(image_sub);
}
- ClearMemoryObjectBindings(HandleToUint64(swapchain_image), kVulkanObjectTypeSwapchainKHR);
+ ClearMemoryObjectBindings(VulkanTypedHandle(swapchain_image, kVulkanObjectTypeImage));
EraseQFOImageRelaseBarriers(swapchain_image);
imageMap.erase(swapchain_image);
}
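Note on the call-site shape adopted in the hunks above (a sketch only; the variable names are illustrative, not lifted from the patch): the separate uint64_t handle plus VulkanObjectType enum collapse into a single typed argument built at the point of use, and HandleToUint64() drops out of the callers:

    const VulkanTypedHandle typed(image, kVulkanObjectTypeImage);
    skip |= ValidateSetMemBinding(mem, typed, api_name);
    SetMemBinding(mem, image_state, memoryOffset, typed);

The sparse-binding calls pass the temporary directly, e.g. SetSparseMemBinding(binding, VulkanTypedHandle(buffer, kVulkanObjectTypeBuffer)).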
diff --git a/layers/core_validation.h b/layers/core_validation.h
index 7d42430..1d45593 100644
--- a/layers/core_validation.h
+++ b/layers/core_validation.h
@@ -261,7 +261,7 @@
PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState(VkPhysicalDevice phys);
PHYSICAL_DEVICE_STATE* GetPhysicalDeviceState();
SURFACE_STATE* GetSurfaceState(VkSurfaceKHR surface);
- BINDABLE* GetObjectMemBinding(uint64_t handle, VulkanObjectType type);
+ BINDABLE* GetObjectMemBinding(const VulkanTypedHandle& typed_handle);
template <typename ExtProp>
void GetPhysicalDeviceExtProperties(VkPhysicalDevice gpu, bool enabled, ExtProp* ext_prop) {
@@ -275,12 +275,12 @@
bool VerifyQueueStateToSeq(QUEUE_STATE* initial_queue, uint64_t initial_seq);
void ClearCmdBufAndMemReferences(CMD_BUFFER_STATE* cb_node);
- void ClearMemoryObjectBinding(uint64_t handle, VulkanObjectType type, VkDeviceMemory mem);
+ void ClearMemoryObjectBinding(const VulkanTypedHandle& typed_handle, VkDeviceMemory mem);
void ResetCommandBufferState(const VkCommandBuffer cb);
- void SetMemBinding(VkDeviceMemory mem, BINDABLE* mem_binding, VkDeviceSize memory_offset, uint64_t handle,
- VulkanObjectType type);
- bool ValidateSetMemBinding(VkDeviceMemory mem, uint64_t handle, VulkanObjectType type, const char* apiName);
- bool SetSparseMemBinding(MEM_BINDING binding, uint64_t handle, VulkanObjectType type);
+ void SetMemBinding(VkDeviceMemory mem, BINDABLE* mem_binding, VkDeviceSize memory_offset,
+ const VulkanTypedHandle& typed_handle);
+ bool ValidateSetMemBinding(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* apiName);
+ bool SetSparseMemBinding(MEM_BINDING binding, const VulkanTypedHandle& typed_handle);
bool ValidateDeviceQueueFamily(uint32_t queue_family, const char* cmd_name, const char* parameter_name, const char* error_code,
bool optional);
BASE_NODE* GetStateStructPtrFromObject(VK_OBJECT object_struct);
@@ -307,7 +307,7 @@
bool ValidatePipelineUnlocked(std::vector<std::unique_ptr<PIPELINE_STATE>> const& pPipelines, int pipelineIndex);
void FreeDescriptorSet(cvdescriptorset::DescriptorSet* descriptor_set);
void DeletePools();
- bool ValidImageBufferQueue(CMD_BUFFER_STATE* cb_node, const VK_OBJECT* object, VkQueue queue, uint32_t count,
+ bool ValidImageBufferQueue(CMD_BUFFER_STATE* cb_node, const VK_OBJECT& object, VkQueue queue, uint32_t count,
const uint32_t* indices);
bool ValidateFenceForSubmit(FENCE_STATE* pFence);
void AddMemObjInfo(void* object, const VkDeviceMemory mem, const VkMemoryAllocateInfo* pAllocateInfo);
@@ -343,8 +343,8 @@
const VkGraphicsPipelineCreateInfo* pipe_cis);
void AddFramebufferBinding(CMD_BUFFER_STATE* cb_state, FRAMEBUFFER_STATE* fb_state);
bool ValidateImageBarrierImage(const char* funcName, CMD_BUFFER_STATE const* cb_state, VkFramebuffer framebuffer,
- uint32_t active_subpass, const safe_VkSubpassDescription2KHR& sub_desc, uint64_t rp_handle,
- uint32_t img_index, const VkImageMemoryBarrier& img_barrier);
+ uint32_t active_subpass, const safe_VkSubpassDescription2KHR& sub_desc,
+ const VulkanTypedHandle& rp_handle, uint32_t img_index, const VkImageMemoryBarrier& img_barrier);
void RecordCmdBeginRenderPassState(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin,
const VkSubpassContents contents);
bool ValidateCmdBeginRenderPass(VkCommandBuffer commandBuffer, RenderPassCreateVersion rp_version,
@@ -382,7 +382,7 @@
const char* function, const char* error_code);
bool SetEventStageMask(VkQueue queue, VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
bool ValidateRenderPassImageBarriers(const char* funcName, CMD_BUFFER_STATE* cb_state, uint32_t active_subpass,
- const safe_VkSubpassDescription2KHR& sub_desc, uint64_t rp_handle,
+ const safe_VkSubpassDescription2KHR& sub_desc, const VulkanTypedHandle& rp_handle,
const safe_VkSubpassDependency2KHR* dependencies,
const std::vector<uint32_t>& self_dependencies, uint32_t image_mem_barrier_count,
const VkImageMemoryBarrier* image_barriers);
@@ -494,11 +494,11 @@
void AddCommandBufferBindingImageView(CMD_BUFFER_STATE*, IMAGE_VIEW_STATE*);
void AddCommandBufferBindingBuffer(CMD_BUFFER_STATE*, BUFFER_STATE*);
void AddCommandBufferBindingBufferView(CMD_BUFFER_STATE*, BUFFER_VIEW_STATE*);
- bool ValidateObjectNotInUse(BASE_NODE* obj_node, VK_OBJECT obj_struct, const char* caller_name, const char* error_code);
+ bool ValidateObjectNotInUse(BASE_NODE* obj_node, const VK_OBJECT& obj_struct, const char* caller_name, const char* error_code);
void InvalidateCommandBuffers(std::unordered_set<CMD_BUFFER_STATE*> const& cb_nodes, VK_OBJECT obj);
void RemoveImageMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE* mem_info);
void RemoveBufferMemoryRange(uint64_t handle, DEVICE_MEMORY_STATE* mem_info);
- void ClearMemoryObjectBindings(uint64_t handle, VulkanObjectType type);
+ void ClearMemoryObjectBindings(const VulkanTypedHandle& typed_handle);
bool ValidateCmdQueueFlags(const CMD_BUFFER_STATE* cb_node, const char* caller_name, VkQueueFlags flags,
const char* error_code);
bool InsideRenderPass(const CMD_BUFFER_STATE* pCB, const char* apiName, const char* msgCode);
@@ -663,7 +663,7 @@
const IMAGE_STATE* dst_img, const VkImageCopy* region, const uint32_t i,
const char* function);
bool ValidateIdleBuffer(VkBuffer buffer);
- bool ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, uint64_t obj_handle, VulkanObjectType obj_type,
+ bool ValidateUsageFlags(VkFlags actual, VkFlags desired, VkBool32 strict, const VulkanTypedHandle& typed_handle,
const char* msgCode, char const* func_name, char const* usage_str);
bool ValidateImageSubresourceRange(const uint32_t image_mip_count, const uint32_t image_layer_count,
const VkImageSubresourceRange& subresourceRange, const char* cmd_name,
@@ -840,7 +840,7 @@
void UpdateCmdBufImageLayouts(CMD_BUFFER_STATE* pCB);
- bool VerifyBoundMemoryIsValid(VkDeviceMemory mem, uint64_t handle, const char* api_name, const char* type_name,
+ bool VerifyBoundMemoryIsValid(VkDeviceMemory mem, const VulkanTypedHandle& typed_handle, const char* api_name,
const char* error_code);
bool ValidateLayoutVsAttachmentDescription(const debug_report_data* report_data, RenderPassCreateVersion rp_version,
diff --git a/layers/core_validation_types.h b/layers/core_validation_types.h
index 6ce39b9..ece49e4 100644
--- a/layers/core_validation_types.h
+++ b/layers/core_validation_types.h
@@ -25,6 +25,7 @@
#ifndef CORE_VALIDATION_TYPES_H_
#define CORE_VALIDATION_TYPES_H_
+#include "cast_utils.h"
#include "hash_vk_types.h"
#include "sparse_containers.h"
#include "vk_safe_struct.h"
@@ -111,13 +112,10 @@
return (queue_family_index == VK_QUEUE_FAMILY_EXTERNAL_KHR) || (queue_family_index == VK_QUEUE_FAMILY_FOREIGN_EXT);
}
-// Generic wrapper for vulkan objects
-struct VK_OBJECT {
- uint64_t handle;
- VulkanObjectType type;
-};
+// Generic wrapper for vulkan objects -- using the one from vk_object_types.h now.
+typedef VulkanTypedHandle VK_OBJECT;
-inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }
+inline bool operator==(const VK_OBJECT &a, const VK_OBJECT &b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }
namespace std {
template <>
@@ -348,6 +346,14 @@
std::unordered_set<MEMORY_RANGE *> aliases;
};
+static inline VulkanTypedHandle MemoryRangeTypedHandle(const MEMORY_RANGE &range) {
+ // TODO: Convert MEMORY_RANGE to use VulkanTypedHandle internally
+ if (range.image) {
+ return VulkanTypedHandle(CastFromUint64<VkImage>(range.handle), kVulkanObjectTypeImage);
+ }
+ return VulkanTypedHandle(CastFromUint64<VkBuffer>(range.handle), kVulkanObjectTypeBuffer);
+}
+
// Data struct for tracking memory object
struct DEVICE_MEMORY_STATE : public BASE_NODE {
void *object; // Dispatchable object used to create this memory (device or swapchain)
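For orientation, a minimal sketch of the VulkanTypedHandle that VK_OBJECT is now typedef'd to; the real definition lives in vk_object_types.h, so treat the member list and constructor below as an assumption rather than a quote:

    struct VulkanTypedHandle {
        uint64_t handle;            // assumed to carry the same fields the old VK_OBJECT did
        VulkanObjectType type;
        template <typename Handle>
        VulkanTypedHandle(Handle h, VulkanObjectType t)
            : handle(CastToUint64(h)), type(t) {}   // CastToUint64 assumed from cast_utils.h
    };

A templated constructor of roughly this shape is what lets callers hand over VkImage, VkBuffer, etc. directly; it is also why MemoryRangeTypedHandle() above needs CastFromUint64<> to go the other way for MEMORY_RANGE, which still stores a raw uint64_t plus an image/buffer flag.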
diff --git a/layers/descriptor_sets.cpp b/layers/descriptor_sets.cpp
index b79e5a0..c9c07b4 100644
--- a/layers/descriptor_sets.cpp
+++ b/layers/descriptor_sets.cpp
@@ -975,7 +975,7 @@
}
// Set is being deleted or updated so invalidate all bound cmd buffers
void cvdescriptorset::DescriptorSet::InvalidateBoundCmdBuffers() {
- device_data_->InvalidateCommandBuffers(cb_bindings, {HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
+ device_data_->InvalidateCommandBuffers(cb_bindings, VulkanTypedHandle(set_, kVulkanObjectTypeDescriptorSet));
}
// Loop through the write updates to do for a push descriptor set, ignoring dstSet
@@ -1240,9 +1240,9 @@
// bind cb to this descriptor set
cb_bindings.insert(cb_node);
// Add bindings for descriptor set, the set's pool, and individual objects in the set
- cb_node->object_bindings.insert({HandleToUint64(set_), kVulkanObjectTypeDescriptorSet});
+ cb_node->object_bindings.emplace(set_, kVulkanObjectTypeDescriptorSet);
pool_state_->cb_bindings.insert(cb_node);
- cb_node->object_bindings.insert({HandleToUint64(pool_state_->pool), kVulkanObjectTypeDescriptorPool});
+ cb_node->object_bindings.emplace(pool_state_->pool, kVulkanObjectTypeDescriptorPool);
// For the active slots, use set# to look up descriptorSet from boundDescriptorSets, and bind all of that descriptor set's
// resources
for (auto binding_req_pair : binding_req_map) {
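The object_bindings changes in this file swap a braced insert for emplace(); a sketch of the difference (the container and names are illustrative, and a std::hash specialization for the typed handle is assumed):

    std::unordered_set<VulkanTypedHandle> object_bindings;
    // before: build a braced pair, then copy it into the set
    //   object_bindings.insert({HandleToUint64(set), kVulkanObjectTypeDescriptorSet});
    // after: forward the handle and the enum straight to the constructor
    object_bindings.emplace(set, kVulkanObjectTypeDescriptorSet);

emplace() constructs the element in place from the two arguments, so no braced-init-list is needed at the call site.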
diff --git a/layers/vk_layer_logging.h b/layers/vk_layer_logging.h
index 22f1dd0..3d80af6 100644
--- a/layers/vk_layer_logging.h
+++ b/layers/vk_layer_logging.h
@@ -211,6 +211,10 @@
// Backwards compatible path for entry points that pass uint64_t's
std::string FormatHandle(uint64_t h) const { return FormatHandle("", h); }
+ std::string FormatHandle(const VulkanTypedHandle &handle) const {
+ return FormatHandle(object_string[handle.type], handle.handle);
+ }
+
template <typename HANDLE_T>
std::string FormatHandle(HANDLE_T h) const {
return FormatHandle(VkHandleInfo<HANDLE_T>::Typename(), HandleToUint64(h));
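Usage sketch for the new overload (the report_data pointer and image variable are illustrative): a log site can pass the typed handle as-is instead of pre-formatting a type string and a raw uint64_t:

    const VulkanTypedHandle obj(image, kVulkanObjectTypeImage);
    std::string label = report_data->FormatHandle(obj);
    // expands to FormatHandle(object_string[obj.type], obj.handle), per the overload above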