layers: Pass common data between pre/post AllocDescriptorSets

With the break between the PreValidate* and PostRecord* calls in the
layers we can end up repeating work in the Post step. To prevent this,
this CL slightly modifies the interface to pass common data between the
pre/post calls in a custom AllocateDescriptorSetsData struct.

I initially attempted to fill this data in a separate function that
would precede the PreValidate* call, but such a function would need to
do some validation of its own, since it performs map lookups that may
fail. The simplest solution, then, is to pass a pointer to the common
data into the PreValidate* function, which fills it as it validates.
If validation and the call down the chain succeed, the PostRecord*
function then takes a pointer to the same data, avoiding redoing the
work that was done at validation time.
diff --git a/layers/core_validation.cpp b/layers/core_validation.cpp
index e7201a7..21d3b07 100644
--- a/layers/core_validation.cpp
+++ b/layers/core_validation.cpp
@@ -5869,26 +5869,30 @@
 // Ensure the pool contains enough descriptors and descriptor sets to satisfy
 // an allocation request. Fills requiredDescriptorsByType with the total number
 // of descriptors of each type required, for later update.
-static bool PreCallValidateAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo) {
+static bool PreCallValidateAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+                                                  cvdescriptorset::AllocateDescriptorSetsData *common_data) {
     // All state checks for AllocateDescriptorSets is done in single function
     return cvdescriptorset::ValidateAllocateDescriptorSets(dev_data->report_data, pAllocateInfo, dev_data->descriptorSetLayoutMap,
-                                                           dev_data->descriptorPoolMap);
+                                                           dev_data->descriptorPoolMap, common_data);
 }
 // Allocation state was good and call down chain was made so update state based on allocating descriptor sets
 static void PostCallRecordAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo,
-                                                 VkDescriptorSet *pDescriptorSets) {
+                                                 VkDescriptorSet *pDescriptorSets,
+                                                 const cvdescriptorset::AllocateDescriptorSetsData *common_data) {
     // All the updates are contained in a single cvdescriptorset function
     cvdescriptorset::PerformAllocateDescriptorSets(
-        pAllocateInfo, pDescriptorSets, &dev_data->descriptorPoolMap, &dev_data->setMap, dev_data->descriptorSetLayoutMap,
-        dev_data->bufferMap, dev_data->memObjMap, dev_data->bufferViewMap, dev_data->samplerMap, dev_data->imageViewMap,
-        dev_data->imageMap, dev_data->device_extensions.imageToSwapchainMap, dev_data->device_extensions.swapchainMap);
+        pAllocateInfo, pDescriptorSets, common_data, &dev_data->descriptorPoolMap, &dev_data->setMap,
+        dev_data->descriptorSetLayoutMap, dev_data->bufferMap, dev_data->memObjMap, dev_data->bufferViewMap, dev_data->samplerMap,
+        dev_data->imageViewMap, dev_data->imageMap, dev_data->device_extensions.imageToSwapchainMap,
+        dev_data->device_extensions.swapchainMap);
 }
 
 VKAPI_ATTR VkResult VKAPI_CALL
 AllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets) {
     layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
     std::unique_lock<std::mutex> lock(global_lock);
-    bool skip_call = PreCallValidateAllocateDescriptorSets(dev_data, pAllocateInfo);
+    cvdescriptorset::AllocateDescriptorSetsData common_data(pAllocateInfo->descriptorSetCount);
+    bool skip_call = PreCallValidateAllocateDescriptorSets(dev_data, pAllocateInfo, &common_data);
     lock.unlock();
 
     if (skip_call)
@@ -5898,7 +5902,7 @@
 
     if (VK_SUCCESS == result) {
         lock.lock();
-        PostCallRecordAllocateDescriptorSets(dev_data, pAllocateInfo, pDescriptorSets);
+        PostCallRecordAllocateDescriptorSets(dev_data, pAllocateInfo, pDescriptorSets, &common_data);
         lock.unlock();
     }
     return result;
diff --git a/layers/descriptor_sets.cpp b/layers/descriptor_sets.cpp
index ede1b5a..5cb38e5 100644
--- a/layers/descriptor_sets.cpp
+++ b/layers/descriptor_sets.cpp
@@ -260,6 +260,9 @@
     return true;
 }
 
+cvdescriptorset::AllocateDescriptorSetsData::AllocateDescriptorSetsData(uint32_t count)
+    : required_descriptors_by_type{}, layout_nodes(count, nullptr) {}
+
 cvdescriptorset::DescriptorSet::DescriptorSet(const VkDescriptorSet set, const DescriptorSetLayout *layout,
                                               const std::unordered_map<VkBuffer, BUFFER_NODE> *buffer_map,
                                               const std::unordered_map<VkDeviceMemory, DEVICE_MEM_INFO> *memory_map,
@@ -1226,9 +1229,8 @@
 bool cvdescriptorset::ValidateAllocateDescriptorSets(
     const debug_report_data *report_data, const VkDescriptorSetAllocateInfo *p_alloc_info,
     const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &set_layout_map,
-    const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &pool_map) {
+    const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &pool_map, AllocateDescriptorSetsData *ds_data) {
     bool skip_call = false;
-    uint32_t requiredDescriptorsByType[VK_DESCRIPTOR_TYPE_RANGE_SIZE]{};
 
     for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
         auto layout_it = set_layout_map.find(p_alloc_info->pSetLayouts[i]);
@@ -1238,12 +1240,14 @@
                         reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]), __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
                         "Unable to find set layout node for layout 0x%" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call",
                         reinterpret_cast<const uint64_t &>(p_alloc_info->pSetLayouts[i]));
-        }
-        // Count total descriptors required per type
-        for (uint32_t j = 0; j < layout_it->second->GetBindingCount(); ++j) {
-            const auto &binding_layout = layout_it->second->GetDescriptorSetLayoutBindingPtrFromIndex(j);
-            uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
-            requiredDescriptorsByType[typeIndex] += binding_layout->descriptorCount;
+        } else {
+            ds_data->layout_nodes[i] = layout_it->second;
+            // Count total descriptors required per type
+            for (uint32_t j = 0; j < layout_it->second->GetBindingCount(); ++j) {
+                const auto &binding_layout = layout_it->second->GetDescriptorSetLayoutBindingPtrFromIndex(j);
+                uint32_t typeIndex = static_cast<uint32_t>(binding_layout->descriptorType);
+                ds_data->required_descriptors_by_type[typeIndex] += binding_layout->descriptorCount;
+            }
         }
     }
     auto pool_it = pool_map.find(p_alloc_info->descriptorPool);
@@ -1265,13 +1269,13 @@
         }
         // Determine whether descriptor counts are satisfiable
         for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
-            if (requiredDescriptorsByType[i] > pool_it->second->availableDescriptorTypeCount[i]) {
+            if (ds_data->required_descriptors_by_type[i] > pool_it->second->availableDescriptorTypeCount[i]) {
                 skip_call |=
                     log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
                             reinterpret_cast<const uint64_t &>(pool_it->second->pool), __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY,
                             "DS", "Unable to allocate %u descriptors of type %s from pool 0x%" PRIxLEAST64
                                   ". This pool only has %d descriptors of this type remaining.",
-                            requiredDescriptorsByType[i], string_VkDescriptorType(VkDescriptorType(i)),
+                            ds_data->required_descriptors_by_type[i], string_VkDescriptorType(VkDescriptorType(i)),
                             reinterpret_cast<uint64_t &>(pool_it->second->pool), pool_it->second->availableDescriptorTypeCount[i]);
             }
         }
@@ -1281,7 +1285,7 @@
 // Decrement allocated sets from the pool and insert new sets into set_map
 void cvdescriptorset::PerformAllocateDescriptorSets(
     const VkDescriptorSetAllocateInfo *p_alloc_info, const VkDescriptorSet *descriptor_sets,
-    std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> *pool_map,
+    const AllocateDescriptorSetsData *ds_data, std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> *pool_map,
     std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *set_map,
     const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &layout_map,
     const std::unordered_map<VkBuffer, BUFFER_NODE> &buffer_map,
@@ -1293,22 +1297,18 @@
     const std::unordered_map<VkImage, VkSwapchainKHR> &image_to_swapchain_map,
     const std::unordered_map<VkSwapchainKHR, SWAPCHAIN_NODE *> &swapchain_map) {
     auto pool_state = (*pool_map)[p_alloc_info->descriptorPool];
-    /* Account for sets allocated from pool */
+    /* Account for sets and individual descriptors allocated from pool */
     pool_state->availableSets -= p_alloc_info->descriptorSetCount;
+    for (uint32_t i = 0; i < VK_DESCRIPTOR_TYPE_RANGE_SIZE; i++) {
+        pool_state->availableDescriptorTypeCount[i] -= ds_data->required_descriptors_by_type[i];
+    }
     /* Create tracking object for each descriptor set; insert into
      * global map and the pool's set.
      */
     for (uint32_t i = 0; i < p_alloc_info->descriptorSetCount; i++) {
-        auto layout_state = layout_map.find(p_alloc_info->pSetLayouts[i])->second;
-        // Account for individual descriptors allocated from pool
-        for (uint32_t j = 0; j < layout_state->GetBindingCount(); ++j) {
-            const auto &binding_layout = layout_state->GetDescriptorSetLayoutBindingPtrFromIndex(j);
-            uint32_t type_index = static_cast<uint32_t>(binding_layout->descriptorType);
-            pool_state->availableDescriptorTypeCount[type_index] -= binding_layout->descriptorCount;
-        }
-        auto new_ds =
-            new cvdescriptorset::DescriptorSet(descriptor_sets[i], layout_state, &buffer_map, &mem_obj_map, &buffer_view_map,
-                                               &sampler_map, &image_view_map, &image_map, &image_to_swapchain_map, &swapchain_map);
+        auto new_ds = new cvdescriptorset::DescriptorSet(descriptor_sets[i], ds_data->layout_nodes[i], &buffer_map, &mem_obj_map,
+                                                         &buffer_view_map, &sampler_map, &image_view_map, &image_map,
+                                                         &image_to_swapchain_map, &swapchain_map);
 
         pool_state->sets.insert(new_ds);
         new_ds->in_use.store(0);
diff --git a/layers/descriptor_sets.h b/layers/descriptor_sets.h
index af062dd..bb5907c 100644
--- a/layers/descriptor_sets.h
+++ b/layers/descriptor_sets.h
@@ -245,6 +245,12 @@
     VkDeviceSize offset_;
     VkDeviceSize range_;
 };
+// Structs to contain common elements that need to be shared between Validate* and Perform* calls below
+struct AllocateDescriptorSetsData {
+    uint32_t required_descriptors_by_type[VK_DESCRIPTOR_TYPE_RANGE_SIZE];
+    std::vector<cvdescriptorset::DescriptorSetLayout const *> layout_nodes;
+    AllocateDescriptorSetsData(uint32_t);
+};
 // Helper functions for descriptor set functions that cross multiple sets
 // "Validate" will make sure an update is ok without actually performing it
 bool ValidateUpdateDescriptorSets(const debug_report_data *,
@@ -256,17 +262,21 @@
 // Validate that Allocation state is ok
 bool ValidateAllocateDescriptorSets(const debug_report_data *, const VkDescriptorSetAllocateInfo *,
                                     const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &,
-                                    const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &);
+                                    const std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> &,
+                                    AllocateDescriptorSetsData *);
 // Update state based on allocating new descriptorsets
-void PerformAllocateDescriptorSets(
-    const VkDescriptorSetAllocateInfo *, const VkDescriptorSet *, std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> *,
-    std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *,
-    const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &,
-    const std::unordered_map<VkBuffer, BUFFER_NODE> &, const std::unordered_map<VkDeviceMemory, DEVICE_MEM_INFO> &,
-    const std::unordered_map<VkBufferView, VkBufferViewCreateInfo> &,
-    const std::unordered_map<VkSampler, std::unique_ptr<SAMPLER_NODE>> &,
-    const std::unordered_map<VkImageView, VkImageViewCreateInfo> &, const std::unordered_map<VkImage, IMAGE_NODE> &,
-    const std::unordered_map<VkImage, VkSwapchainKHR> &, const std::unordered_map<VkSwapchainKHR, SWAPCHAIN_NODE *> &);
+void PerformAllocateDescriptorSets(const VkDescriptorSetAllocateInfo *, const VkDescriptorSet *, const AllocateDescriptorSetsData *,
+                                   std::unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> *,
+                                   std::unordered_map<VkDescriptorSet, cvdescriptorset::DescriptorSet *> *,
+                                   const std::unordered_map<VkDescriptorSetLayout, cvdescriptorset::DescriptorSetLayout *> &,
+                                   const std::unordered_map<VkBuffer, BUFFER_NODE> &,
+                                   const std::unordered_map<VkDeviceMemory, DEVICE_MEM_INFO> &,
+                                   const std::unordered_map<VkBufferView, VkBufferViewCreateInfo> &,
+                                   const std::unordered_map<VkSampler, std::unique_ptr<SAMPLER_NODE>> &,
+                                   const std::unordered_map<VkImageView, VkImageViewCreateInfo> &,
+                                   const std::unordered_map<VkImage, IMAGE_NODE> &,
+                                   const std::unordered_map<VkImage, VkSwapchainKHR> &,
+                                   const std::unordered_map<VkSwapchainKHR, SWAPCHAIN_NODE *> &);
 
 /*
  * DescriptorSet class