layers: Clean up poolMap access
Remove some unneeded passing of poolMap (and descriptorSetLayoutMap) around and
instead use the getPoolNode() and getDescriptorSetLayout() accessors from the
descriptor set validation code.
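
For context, a minimal self-contained sketch of the accessor pattern this change
consolidates (names and types here are simplified stand-ins, not the layer's real
layer_data/DESCRIPTOR_POOL_NODE definitions): callers hand around a single state
pointer and use a lookup helper that returns a node pointer or nullptr, instead of
passing individual unordered_maps into each validation routine.

    // sketch only -- simplified stand-ins for the layer's real types
    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>

    struct PoolNode { uint32_t availableSets; };   // stand-in for DESCRIPTOR_POOL_NODE
    using PoolHandle = uint64_t;                   // stand-in for VkDescriptorPool

    struct DeviceState {                           // stand-in for layer_data
        std::unordered_map<PoolHandle, PoolNode *> poolMap;
    };

    // Analogous to getPoolNode(): hide the map lookup behind one accessor.
    static PoolNode *GetPoolNode(const DeviceState *state, PoolHandle pool) {
        auto it = state->poolMap.find(pool);
        return (it == state->poolMap.end()) ? nullptr : it->second;
    }

    // A validation-style caller now only needs the state pointer, not the map.
    static bool ValidateAllocation(const DeviceState *state, PoolHandle pool,
                                   uint32_t set_count) {
        auto *node = GetPoolNode(state, pool);
        if (!node) {
            std::printf("Unknown descriptor pool 0x%llx\n", (unsigned long long)pool);
            return true;  // mirrors the skip_call convention
        }
        if (node->availableSets < set_count) {
            std::printf("Pool 0x%llx has only %u sets remaining\n",
                        (unsigned long long)pool, node->availableSets);
            return true;
        }
        return false;
    }

    int main() {
        PoolNode node;
        node.availableSets = 4;
        DeviceState state;
        state.poolMap[0x1] = &node;
        bool skip = ValidateAllocation(&state, 0x1, 8);  // hits the "remaining" path
        std::printf("skip_call = %d\n", skip);
        return 0;
    }

Keeping the map lookups behind accessors also means the maps' container types can
change without touching every caller, which is the motivation for exporting
getPoolNode()/getDescriptorSetLayout() rather than the maps themselves.
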
diff --git a/layers/core_validation.cpp b/layers/core_validation.cpp
index 1f0d898..49324b8 100644
--- a/layers/core_validation.cpp
+++ b/layers/core_validation.cpp
@@ -2037,7 +2037,7 @@
return &it->second;
}
-static cvdescriptorset::DescriptorSetLayout const *getDescriptorSetLayout(layer_data const *my_data, VkDescriptorSetLayout dsLayout) {
+cvdescriptorset::DescriptorSetLayout const *getDescriptorSetLayout(layer_data const *my_data, VkDescriptorSetLayout dsLayout) {
auto it = my_data->descriptorSetLayoutMap.find(dsLayout);
if (it == my_data->descriptorSetLayoutMap.end()) {
return nullptr;
@@ -3166,7 +3166,7 @@
// Block of code at start here specifically for managing/tracking DSs
// Return Pool node ptr for specified pool or else NULL
-static DESCRIPTOR_POOL_NODE *getPoolNode(const layer_data *dev_data, const VkDescriptorPool pool) {
+DESCRIPTOR_POOL_NODE *getPoolNode(const layer_data *dev_data, const VkDescriptorPool pool) {
auto pool_it = dev_data->descriptorPoolMap.find(pool);
if (pool_it == dev_data->descriptorPoolMap.end()) {
return NULL;
@@ -5914,8 +5914,7 @@
static bool PreCallValidateAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo,
cvdescriptorset::AllocateDescriptorSetsData *common_data) {
// All state checks for AllocateDescriptorSets is done in single function
- return cvdescriptorset::ValidateAllocateDescriptorSets(dev_data->report_data, pAllocateInfo, dev_data->descriptorSetLayoutMap,
- dev_data->descriptorPoolMap, common_data);
+ return cvdescriptorset::ValidateAllocateDescriptorSets(dev_data->report_data, pAllocateInfo, dev_data, common_data);
}
// Allocation state was good and call down chain was made so update state based on allocating descriptor sets
static void PostCallRecordAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo,
diff --git a/layers/core_validation_types.h b/layers/core_validation_types.h
index 13aa21f..ce51dba 100644
--- a/layers/core_validation_types.h
+++ b/layers/core_validation_types.h
@@ -54,6 +54,7 @@
// Fwd declarations
namespace cvdescriptorset {
+class DescriptorSetLayout;
class DescriptorSet;
};
@@ -486,6 +487,8 @@
namespace core_validation {
struct layer_data;
cvdescriptorset::DescriptorSet *getSetNode(const layer_data *, VkDescriptorSet);
+cvdescriptorset::DescriptorSetLayout const *getDescriptorSetLayout(layer_data const *, VkDescriptorSetLayout);
+DESCRIPTOR_POOL_NODE *getPoolNode(const layer_data *, const VkDescriptorPool);
BUFFER_NODE *getBufferNode(const layer_data *, VkBuffer);
IMAGE_NODE *getImageNode(const layer_data *, VkImage);
DEVICE_MEM_INFO *getMemObjInfo(const layer_data *, VkDeviceMemory);
diff --git a/layers/descriptor_sets.cpp b/layers/descriptor_sets.cpp
index 8ccd334..a8e88cf 100644
--- a/layers/descriptor_sets.cpp
+++ b/layers/descriptor_sets.cpp
@@ -1209,32 +1209,32 @@
return true;
}
// Verify that the state at allocate time is correct, but don't actually allocate the sets yet
-bool cvdescriptorset::ValidateAllocateDescriptorSets(
- const debug_report_data *report_data, const VkDescriptorSetAllocateInfo *p_alloc_info,
- const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &set_layout_map,
- const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &pool_map, AllocateDescriptorSetsData *ds_data) {
+bool cvdescriptorset::ValidateAllocateDescriptorSets(const debug_report_data *report_data,
+ const VkDescriptorSetAllocateInfo *p_alloc_info,
+ const core_validation::layer_data *dev_data,
+ AllocateDescriptorSetsData *ds_data) {
bool skip_call = false;
for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
- auto layout_it = set_layout_map.find(p_alloc_info->pSetLayouts[i]);
- if (layout_it == set_layout_map.end()) {
+ auto layout = getDescriptorSetLayout(dev_data, p_alloc_info->pSetLayouts[i]);
+ if (!layout) {
skip_call |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]), __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
"Unable to find set layout node for layout 0x%" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call",
reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]));
} else {
- ds_data->layout_nodes[i] = layout_it->second;
+ ds_data->layout_nodes[i] = layout;
// Count total descriptors required per type
- for (uint32_t j = 0; j < layout_it->second->GetBindingCount(); ++j) {
- const auto &binding_layout = layout_it->second->GetDescriptorSetLayoutBindingPtrFromIndex(j);
+ for (uint32_t j = 0; j < layout->GetBindingCount(); ++j) {
+ const auto &binding_layout = layout->GetDescriptorSetLayoutBindingPtrFromIndex(j);
uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
}
}
}
- auto pool_it = pool_map.find(p_alloc_info->descriptorPool);
- if (pool_it == pool_map.end()) {
+ auto pool_node = getPoolNode(dev_data, p_alloc_info->descriptorPool);
+ if (!pool_node) {
skip_call |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
reinterpret_cast<const uint64_t &>(p_alloc_info->descriptorPool), __LINE__, DRAWSTATE_INVALID_POOL, "DS",
@@ -1242,24 +1242,23 @@
reinterpret_cast<const uint64_t &>(p_alloc_info->descriptorPool));
} else { // Make sure pool has all the available descriptors before calling down chain
// Track number of descriptorSets allowable in this pool
- if (pool_it->second->availableSets < p_alloc_info->descriptorSetCount) {
- skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
- reinterpret_cast<uint64_t &>(pool_it->second->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY,
- "DS", "Unable to allocate %u descriptorSets from pool 0x%" PRIxLEAST64
- ". This pool only has %d descriptorSets remaining.",
- p_alloc_info->descriptorSetCount, reinterpret_cast<uint64_t &>(pool_it->second->pool),
- pool_it->second->availableSets);
+ if (pool_node->availableSets < p_alloc_info->descriptorSetCount) {
+ skip_call |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ reinterpret_cast<uint64_t &>(pool_node->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY, "DS",
+ "Unable to allocate %u descriptorSets from pool 0x%" PRIxLEAST64
+ ". This pool only has %d descriptorSets remaining.",
+ p_alloc_info->descriptorSetCount, reinterpret_cast<uint64_t &>(pool_node->pool), pool_node->availableSets);
}
// Determine whether descriptor counts are satisfiable
for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
- if (ds_data->required_descriptors_by_type[i] > pool_it->second->availableDescriptorTypeCount[i]) {
- skip_call |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
- reinterpret_cast<const uint64_t &>(pool_it->second->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY,
- "DS", "Unable to allocate %u descriptors of type %s from pool 0x%" PRIxLEAST64
- ". This pool only has %d descriptors of this type remaining.",
- ds_data->required_descriptors_by_type[i], string_VkDescriptorType(VkDescriptorType(i)),
- reinterpret_cast<uint64_t &>(pool_it->second->pool), pool_it->second->availableDescriptorTypeCount[i]);
+ if (ds_data->required_descriptors_by_type[i] > pool_node->availableDescriptorTypeCount[i]) {
+ skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ reinterpret_cast<const uint64_t &>(pool_node->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY,
+ "DS", "Unable to allocate %u descriptors of type %s from pool 0x%" PRIxLEAST64
+ ". This pool only has %d descriptors of this type remaining.",
+ ds_data->required_descriptors_by_type[i], string_VkDescriptorType(VkDescriptorType(i)),
+ reinterpret_cast<uint64_t &>(pool_node->pool), pool_node->availableDescriptorTypeCount[i]);
}
}
}
diff --git a/layers/descriptor_sets.h b/layers/descriptor_sets.h
index 1b7ebb4..d53d97f 100644
--- a/layers/descriptor_sets.h
+++ b/layers/descriptor_sets.h
@@ -257,9 +257,7 @@
const VkCopyDescriptorSet *);
// Validate that Allocation state is ok
bool ValidateAllocateDescriptorSets(const debug_report_data *, const VkDescriptorSetAllocateInfo *,
- const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &,
- const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &,
- AllocateDescriptorSetsData *);
+ const core_validation::layer_data *, AllocateDescriptorSetsData *);
// Update state based on allocating new descriptorsets
void PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *, const VkDescriptorSet *, const AllocateDescriptorSetsData *,
std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> *,